@dinoxx/dinox-cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +294 -0
- package/dist/auth/userInfo.d.ts +14 -0
- package/dist/auth/userInfo.js +115 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +32 -0
- package/dist/cliTypes.d.ts +6 -0
- package/dist/cliTypes.js +1 -0
- package/dist/commands/auth/index.d.ts +2 -0
- package/dist/commands/auth/index.js +193 -0
- package/dist/commands/boxes/index.d.ts +2 -0
- package/dist/commands/boxes/index.js +107 -0
- package/dist/commands/boxes/repo.d.ts +21 -0
- package/dist/commands/boxes/repo.js +154 -0
- package/dist/commands/config/index.d.ts +2 -0
- package/dist/commands/config/index.js +67 -0
- package/dist/commands/info/index.d.ts +2 -0
- package/dist/commands/info/index.js +20 -0
- package/dist/commands/notes/index.d.ts +2 -0
- package/dist/commands/notes/index.js +271 -0
- package/dist/commands/notes/repo.d.ts +70 -0
- package/dist/commands/notes/repo.js +674 -0
- package/dist/commands/notes/searchTime.d.ts +9 -0
- package/dist/commands/notes/searchTime.js +85 -0
- package/dist/commands/prompt/index.d.ts +2 -0
- package/dist/commands/prompt/index.js +51 -0
- package/dist/commands/prompt/repo.d.ts +6 -0
- package/dist/commands/prompt/repo.js +18 -0
- package/dist/commands/sync.d.ts +2 -0
- package/dist/commands/sync.js +68 -0
- package/dist/commands/tags/index.d.ts +2 -0
- package/dist/commands/tags/index.js +120 -0
- package/dist/commands/tags/repo.d.ts +14 -0
- package/dist/commands/tags/repo.js +247 -0
- package/dist/config/keys.d.ts +9 -0
- package/dist/config/keys.js +17 -0
- package/dist/config/paths.d.ts +4 -0
- package/dist/config/paths.js +39 -0
- package/dist/config/resolve.d.ts +2 -0
- package/dist/config/resolve.js +56 -0
- package/dist/config/serviceEndpoints.d.ts +3 -0
- package/dist/config/serviceEndpoints.js +3 -0
- package/dist/config/store.d.ts +5 -0
- package/dist/config/store.js +87 -0
- package/dist/config/types.d.ts +51 -0
- package/dist/config/types.js +1 -0
- package/dist/dinox.d.ts +2 -0
- package/dist/dinox.js +50 -0
- package/dist/powersync/connector.d.ts +21 -0
- package/dist/powersync/connector.js +58 -0
- package/dist/powersync/runtime.d.ts +37 -0
- package/dist/powersync/runtime.js +107 -0
- package/dist/powersync/schema/content.d.ts +76 -0
- package/dist/powersync/schema/content.js +76 -0
- package/dist/powersync/schema/index.d.ts +371 -0
- package/dist/powersync/schema/index.js +35 -0
- package/dist/powersync/schema/local.d.ts +68 -0
- package/dist/powersync/schema/local.js +83 -0
- package/dist/powersync/schema/note.d.ts +34 -0
- package/dist/powersync/schema/note.js +34 -0
- package/dist/powersync/schema/notesExtras.d.ts +62 -0
- package/dist/powersync/schema/notesExtras.js +71 -0
- package/dist/powersync/schema/projects.d.ts +101 -0
- package/dist/powersync/schema/projects.js +101 -0
- package/dist/powersync/schema/tags.d.ts +37 -0
- package/dist/powersync/schema/tags.js +37 -0
- package/dist/powersync/tokenIndex.d.ts +17 -0
- package/dist/powersync/tokenIndex.js +202 -0
- package/dist/powersync/uploader.d.ts +7 -0
- package/dist/powersync/uploader.js +134 -0
- package/dist/utils/argValue.d.ts +1 -0
- package/dist/utils/argValue.js +17 -0
- package/dist/utils/errors.d.ts +10 -0
- package/dist/utils/errors.js +17 -0
- package/dist/utils/id.d.ts +1 -0
- package/dist/utils/id.js +4 -0
- package/dist/utils/output.d.ts +2 -0
- package/dist/utils/output.js +10 -0
- package/dist/utils/redact.d.ts +1 -0
- package/dist/utils/redact.js +10 -0
- package/dist/utils/text.d.ts +1 -0
- package/dist/utils/text.js +35 -0
- package/dist/utils/time.d.ts +1 -0
- package/dist/utils/time.js +3 -0
- package/dist/utils/tiptapMarkdown.d.ts +6 -0
- package/dist/utils/tiptapMarkdown.js +149 -0
- package/dist/utils/tokenize.d.ts +1 -0
- package/dist/utils/tokenize.js +56 -0
- package/dist/utils/version.d.ts +1 -0
- package/dist/utils/version.js +21 -0
- package/package.json +63 -0
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import os from 'node:os';
import path from 'node:path';

/**
 * Base directory for configuration files.
 * Honors the DINOX_CONFIG_DIR override, then falls back to the
 * platform convention (APPDATA / Application Support / XDG).
 */
function resolveConfigHome() {
    const override = process.env.DINOX_CONFIG_DIR;
    if (override) {
        return override;
    }
    switch (process.platform) {
        case 'win32':
            return process.env.APPDATA ?? path.join(os.homedir(), 'AppData', 'Roaming');
        case 'darwin':
            return path.join(os.homedir(), 'Library', 'Application Support');
        default:
            return process.env.XDG_CONFIG_HOME ?? path.join(os.homedir(), '.config');
    }
}

/**
 * Base directory for application data.
 * Honors the DINOX_DATA_DIR override, then falls back to the
 * platform convention (LOCALAPPDATA / Application Support / XDG).
 */
function resolveDataHome() {
    const override = process.env.DINOX_DATA_DIR;
    if (override) {
        return override;
    }
    switch (process.platform) {
        case 'win32':
            return process.env.LOCALAPPDATA ?? path.join(os.homedir(), 'AppData', 'Local');
        case 'darwin':
            // macOS typically stores app data under Application Support.
            return path.join(os.homedir(), 'Library', 'Application Support');
        default:
            return process.env.XDG_DATA_HOME ?? path.join(os.homedir(), '.local', 'share');
    }
}

/** Directory that holds dinox configuration files. */
export function getConfigDir() {
    return path.join(resolveConfigHome(), 'dinox');
}

/** Full path of the JSON configuration file. */
export function getConfigFilePath() {
    return path.join(getConfigDir(), 'config.json');
}

/** Directory that holds dinox application data. */
export function getDataDir() {
    return path.join(resolveDataHome(), 'dinox');
}

/** Full path of the local PowerSync SQLite database file. */
export function getPowerSyncDbPath() {
    return path.join(getDataDir(), 'powersync.sqlite');
}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import { POWERSYNC_DEFAULT_ENDPOINT, POWERSYNC_DEFAULT_TOKEN_ENDPOINT, POWERSYNC_DEFAULT_UPLOAD_BASE_URL, } from './serviceEndpoints.js';
// Fallback upload paths used when neither env vars nor the config file set one.
const DEFAULT_UPLOAD_V4_PATH = '/powersync/v4/uploadData';
const DEFAULT_UPLOAD_V2_PATH = '/powersync/v2/uploadData';
// Default sync/connect timeout (15 s) when no override is configured.
const DEFAULT_SYNC_TIMEOUT_MS = 15_000;
|
|
5
|
+
// Return the trimmed string when value is a non-blank string; otherwise undefined.
function nonEmptyString(value) {
    if (typeof value !== 'string') {
        return undefined;
    }
    const normalized = value.trim();
    return normalized === '' ? undefined : normalized;
}
|
|
12
|
+
// Coerce a string or number to a positive integer (truncated); undefined otherwise.
function parsePositiveInt(value) {
    const coercible = typeof value === 'string' || typeof value === 'number';
    if (!coercible) {
        return undefined;
    }
    const numeric = Number(value);
    return Number.isFinite(numeric) && numeric > 0 ? Math.trunc(numeric) : undefined;
}
|
|
22
|
+
/**
 * Merge the raw config file with environment overrides into a fully
 * resolved config. Precedence: environment variable > config file > default.
 * Blank/whitespace-only values are treated as unset via nonEmptyString.
 */
export function resolveConfig(raw) {
    // Values sourced from the config file (trimmed; blanks become undefined).
    const fileAuth = nonEmptyString(raw.auth?.authorization);
    const fileUserId = nonEmptyString(raw.auth?.userId);
    // username/email may legitimately be null; fall back to the raw value so an
    // explicit null survives resolution instead of being dropped.
    // NOTE(review): a whitespace-only username/email falls back to the untrimmed
    // raw value here rather than undefined — confirm that is intended.
    const fileUsername = nonEmptyString(raw.auth?.username ?? undefined) ?? raw.auth?.username ?? undefined;
    const fileEmail = nonEmptyString(raw.auth?.email ?? undefined) ?? raw.auth?.email ?? undefined;
    const fileUserInfoSyncedAt = nonEmptyString(raw.auth?.userInfoSyncedAt);
    const fileTokenEndpoint = nonEmptyString(raw.powersync?.tokenEndpoint);
    const fileUploadV4Path = nonEmptyString(raw.powersync?.uploadV4Path);
    const fileUploadV2Path = nonEmptyString(raw.powersync?.uploadV2Path);
    const fileTimeoutMs = parsePositiveInt(raw.sync?.timeoutMs);
    // Environment overrides. POWERSYNC_TOKEN_ENDPOINT is a secondary alias for
    // DINOX_POWERSYNC_TOKEN_ENDPOINT.
    const envAuth = nonEmptyString(process.env.DINOX_AUTHORIZATION);
    const envTokenEndpoint = nonEmptyString(process.env.DINOX_POWERSYNC_TOKEN_ENDPOINT) ?? nonEmptyString(process.env.POWERSYNC_TOKEN_ENDPOINT);
    const envUploadV4Path = nonEmptyString(process.env.DINOX_POWERSYNC_UPLOAD_V4_PATH);
    const envUploadV2Path = nonEmptyString(process.env.DINOX_POWERSYNC_UPLOAD_V2_PATH);
    const envTimeoutMs = parsePositiveInt(process.env.DINOX_SYNC_TIMEOUT_MS);
    return {
        auth: {
            // Only the authorization header can be overridden from the environment;
            // the user-identity fields always come from the config file.
            authorization: envAuth ?? fileAuth,
            userId: fileUserId,
            username: fileUsername,
            email: fileEmail,
            userInfoSyncedAt: fileUserInfoSyncedAt,
        },
        powersync: {
            // endpoint and uploadBaseUrl are fixed to the service defaults here;
            // only the token endpoint and upload paths are configurable.
            endpoint: POWERSYNC_DEFAULT_ENDPOINT,
            tokenEndpoint: envTokenEndpoint ?? fileTokenEndpoint ?? POWERSYNC_DEFAULT_TOKEN_ENDPOINT,
            uploadBaseUrl: POWERSYNC_DEFAULT_UPLOAD_BASE_URL,
            uploadV4Path: envUploadV4Path ?? fileUploadV4Path ?? DEFAULT_UPLOAD_V4_PATH,
            uploadV2Path: envUploadV2Path ?? fileUploadV2Path ?? DEFAULT_UPLOAD_V2_PATH,
        },
        sync: {
            timeoutMs: envTimeoutMs ?? fileTimeoutMs ?? DEFAULT_SYNC_TIMEOUT_MS,
        },
    };
}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import type { DinoxConfig } from './types.js';
/** Read the JSON config file from disk; resolves to {} when the file is missing. */
export declare function loadConfig(): Promise<DinoxConfig>;
/** Persist the config atomically (temp file + rename). */
export declare function saveConfig(config: DinoxConfig): Promise<void>;
/** Set a dot-separated key path (e.g. "powersync.tokenEndpoint") and persist. */
export declare function setConfigValue(keyPath: string, value: unknown): Promise<void>;
/** Read a dot-separated key path; with no keyPath, returns the whole config. */
export declare function getConfigValue(keyPath?: string): Promise<unknown>;
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import { getConfigDir, getConfigFilePath } from './paths.js';
|
|
4
|
+
import { generateId } from '../utils/id.js';
|
|
5
|
+
// True for non-null, non-array objects (plain dictionaries).
function isPlainObject(value) {
    if (value === null || Array.isArray(value)) {
        return false;
    }
    return typeof value === 'object';
}
|
|
8
|
+
// Create the config directory (private, 0700) if needed and return its path.
async function ensureConfigDir() {
    const configDir = getConfigDir();
    await fs.mkdir(configDir, { recursive: true, mode: 0o700 });
    return configDir;
}
|
|
13
|
+
/**
 * Read and parse the config file.
 * Returns {} when the file does not exist or does not contain a plain object;
 * other read errors and JSON syntax errors propagate to the caller.
 */
export async function loadConfig() {
    const configPath = getConfigFilePath();
    let contents;
    try {
        contents = await fs.readFile(configPath, 'utf8');
    }
    catch (error) {
        // A missing file is normal (first run); anything else is a real failure.
        if (error?.code === 'ENOENT') {
            return {};
        }
        throw error;
    }
    const parsed = JSON.parse(contents);
    return isPlainObject(parsed) ? parsed : {};
}
|
|
30
|
+
/**
 * Persist the config atomically: write to a uniquely named temp file in the
 * same directory, then rename over the real file so readers never observe a
 * partial write.
 *
 * Fix: if any step after creating the temp file fails (write, chmod, rename),
 * the temp file is now removed best-effort before rethrowing, instead of
 * being leaked into the config directory.
 */
export async function saveConfig(config) {
    const dir = await ensureConfigDir();
    const filePath = getConfigFilePath();
    const tmpPath = path.join(dir, `config.${generateId()}.tmp`);
    const payload = `${JSON.stringify(config, null, 2)}\n`;
    // 'wx' fails if the temp file already exists; generateId() makes collisions unlikely.
    const handle = await fs.open(tmpPath, 'wx', 0o600);
    try {
        try {
            await handle.writeFile(payload, 'utf8');
        }
        finally {
            await handle.close();
        }
        if (process.platform !== 'win32') {
            // Best-effort: keep the config private.
            await fs.chmod(tmpPath, 0o600).catch(() => undefined);
        }
        await fs.rename(tmpPath, filePath);
    }
    catch (error) {
        // Don't leave a stray temp file behind on failure.
        await fs.unlink(tmpPath).catch(() => undefined);
        throw error;
    }
}
|
|
49
|
+
/**
 * Set a nested property on `target` addressed by a dot-separated key path,
 * creating intermediate plain objects as needed. Empty segments are ignored;
 * an empty path is a no-op.
 *
 * Fix: reject prototype-walking segments. Previously a path such as
 * "__proto__.x" traversed into Object.prototype (which passes the
 * plain-object check) and wrote a property onto it — prototype pollution
 * reachable from user-supplied CLI key paths.
 *
 * @throws {Error} when a segment is "__proto__", "prototype", or "constructor".
 */
function setDeepValue(target, keyPath, value) {
    const segments = keyPath.split('.').filter(Boolean);
    if (segments.length === 0) {
        return;
    }
    for (const segment of segments) {
        if (segment === '__proto__' || segment === 'prototype' || segment === 'constructor') {
            throw new Error(`Refusing to set unsafe config key segment: ${segment}`);
        }
    }
    let cursor = target;
    for (let i = 0; i < segments.length - 1; i += 1) {
        const segment = segments[i];
        const next = cursor[segment];
        // Replace anything that is not a plain object so the path can be descended.
        if (typeof next !== 'object' || next === null || Array.isArray(next)) {
            cursor[segment] = {};
        }
        cursor = cursor[segment];
    }
    cursor[segments[segments.length - 1]] = value;
}
|
|
65
|
+
// Read a nested property by dot-separated key path; undefined when any step
// of the walk lands on something that is not a plain object.
function getDeepValue(target, keyPath) {
    let node = target;
    for (const segment of keyPath.split('.').filter(Boolean)) {
        const traversable = typeof node === 'object' && node !== null && !Array.isArray(node);
        if (!traversable) {
            return undefined;
        }
        node = node[segment];
    }
    return node;
}
|
|
76
|
+
// Load, mutate one dot-path key, and persist the config in a single step.
export async function setConfigValue(keyPath, value) {
    const current = await loadConfig();
    setDeepValue(current, keyPath, value);
    await saveConfig(current);
}
|
|
81
|
+
// Read one dot-path key from the persisted config; with no key, return it all.
export async function getConfigValue(keyPath) {
    const current = await loadConfig();
    return keyPath ? getDeepValue(current, keyPath) : current;
}
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
export type DinoxConfig = {
|
|
2
|
+
auth?: {
|
|
3
|
+
/**
|
|
4
|
+
* Full Authorization header value.
|
|
5
|
+
* Example: "Bearer <token>".
|
|
6
|
+
*/
|
|
7
|
+
authorization?: string;
|
|
8
|
+
/**
|
|
9
|
+
* Backend user id from /auth/userInfo.
|
|
10
|
+
*/
|
|
11
|
+
userId?: string;
|
|
12
|
+
username?: string | null;
|
|
13
|
+
email?: string | null;
|
|
14
|
+
userInfoSyncedAt?: string;
|
|
15
|
+
};
|
|
16
|
+
powersync?: {
|
|
17
|
+
/** PowerSync sync service endpoint (downloads/subscriptions). */
|
|
18
|
+
endpoint?: string;
|
|
19
|
+
/** Endpoint that exchanges Authorization for PowerSync credentials. */
|
|
20
|
+
tokenEndpoint?: string;
|
|
21
|
+
/** Base URL for upload endpoints. */
|
|
22
|
+
uploadBaseUrl?: string;
|
|
23
|
+
/** Defaults to /powersync/v4/uploadData */
|
|
24
|
+
uploadV4Path?: string;
|
|
25
|
+
/** Defaults to /powersync/v2/uploadData */
|
|
26
|
+
uploadV2Path?: string;
|
|
27
|
+
};
|
|
28
|
+
sync?: {
|
|
29
|
+
/** Default sync/connect timeout in milliseconds. */
|
|
30
|
+
timeoutMs?: number;
|
|
31
|
+
};
|
|
32
|
+
};
|
|
33
|
+
export type ResolvedDinoxConfig = {
|
|
34
|
+
auth: {
|
|
35
|
+
authorization?: string;
|
|
36
|
+
userId?: string;
|
|
37
|
+
username?: string | null;
|
|
38
|
+
email?: string | null;
|
|
39
|
+
userInfoSyncedAt?: string;
|
|
40
|
+
};
|
|
41
|
+
powersync: {
|
|
42
|
+
endpoint: string;
|
|
43
|
+
tokenEndpoint: string;
|
|
44
|
+
uploadBaseUrl: string;
|
|
45
|
+
uploadV4Path: string;
|
|
46
|
+
uploadV2Path: string;
|
|
47
|
+
};
|
|
48
|
+
sync: {
|
|
49
|
+
timeoutMs: number;
|
|
50
|
+
};
|
|
51
|
+
};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
package/dist/dinox.d.ts
ADDED
package/dist/dinox.js
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { createCli } from './cli.js';
|
|
3
|
+
import { isDinoxError } from './utils/errors.js';
|
|
4
|
+
import { printYaml } from './utils/output.js';
|
|
5
|
+
// Strip a single leading "--" that pnpm/npm script forwarding can prepend.
function normalizeArgv(argv) {
    const [first, ...rest] = argv;
    return first === '--' ? rest : argv;
}
|
|
12
|
+
// Build the CLI and run it against the (normalized) process arguments.
async function main() {
    const cli = createCli();
    const userArgs = normalizeArgv(process.argv.slice(2));
    // parseAsync expects the conventional [node, script, ...args] shape.
    const scriptPath = process.argv[1] ?? 'dino';
    await cli.parseAsync(['node', scriptPath, ...userArgs]);
}
|
|
18
|
+
// True when the raw argv carries the global --json flag.
function shouldOutputJson(argv) {
    return argv.some((arg) => arg === '--json');
}
// True when the raw argv carries the global --verbose flag.
function isVerbose(argv) {
    return argv.some((arg) => arg === '--verbose');
}
|
|
24
|
+
// Top-level error handler: formats any escaped error for either structured
// (--json) or plain-text output, and sets the process exit code without
// calling process.exit (so pending stdout flushes complete).
main().catch((error) => {
    const argv = process.argv;
    const json = shouldOutputJson(argv);
    const verbose = isVerbose(argv);
    // Normalize non-Error throwables (strings, objects) into printable parts.
    const message = error instanceof Error ? error.message : String(error);
    const name = error instanceof Error ? error.name : 'Error';
    const stack = error instanceof Error ? error.stack : undefined;
    // DinoxError carries structured details and a specific exit code.
    const details = isDinoxError(error) ? error.details : undefined;
    const exitCode = isDinoxError(error) ? error.exitCode : 1;
    process.exitCode = exitCode;
    if (json) {
        // NOTE(review): --json output is emitted via printYaml — confirm that
        // printYaml produces the format --json callers expect.
        printYaml({
            ok: false,
            error: {
                name,
                message,
                // Include optional fields only when present, keeping output minimal.
                ...(details !== undefined ? { details } : {}),
                ...(verbose && stack ? { stack } : {}),
            },
        });
        return;
    }
    console.error(message);
    if (verbose && stack) {
        console.error(stack);
    }
});
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import type { AbstractPowerSyncDatabase, PowerSyncBackendConnector, PowerSyncCredentials } from '@powersync/node';
|
|
2
|
+
export type { PowerSyncCredentials } from '@powersync/node';
|
|
3
|
+
export type CredentialFetcher = () => Promise<PowerSyncCredentials>;
|
|
4
|
+
export type UploadHandler = (database: AbstractPowerSyncDatabase) => Promise<void>;
|
|
5
|
+
/**
|
|
6
|
+
* Connector that fetches PowerSync credentials from your backend.
|
|
7
|
+
* This is injectable for testability.
|
|
8
|
+
*/
|
|
9
|
+
export declare class PowerSyncConnector implements PowerSyncBackendConnector {
|
|
10
|
+
private readonly fetcher;
|
|
11
|
+
private readonly uploader?;
|
|
12
|
+
constructor(fetcher: CredentialFetcher, uploader?: UploadHandler | undefined);
|
|
13
|
+
fetchCredentials(): Promise<PowerSyncCredentials>;
|
|
14
|
+
uploadData(database: AbstractPowerSyncDatabase): Promise<void>;
|
|
15
|
+
}
|
|
16
|
+
export declare function fetchCredentialsWithAuthorization({ authorization, endpoint, credentialsUrl, signal, }: {
|
|
17
|
+
authorization: string;
|
|
18
|
+
endpoint: string;
|
|
19
|
+
credentialsUrl: string;
|
|
20
|
+
signal?: AbortSignal;
|
|
21
|
+
}): Promise<PowerSyncCredentials>;
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
/**
 * Connector that fetches PowerSync credentials from your backend.
 * This is injectable for testability.
 */
export class PowerSyncConnector {
    fetcher;
    uploader;
    constructor(fetcher, uploader) {
        this.fetcher = fetcher;
        this.uploader = uploader;
    }
    /** Fetch credentials via the injected fetcher and validate their shape. */
    async fetchCredentials() {
        const creds = await this.fetcher();
        const hasEndpoint = Boolean(creds?.endpoint);
        const hasToken = Boolean(creds?.token);
        if (!hasEndpoint || !hasToken) {
            throw new Error('PowerSync credentials must include endpoint and token');
        }
        return creds;
    }
    /** Delegate uploads to the injected handler; no-op when none was provided. */
    async uploadData(database) {
        const handler = this.uploader;
        if (handler) {
            await handler(database);
        }
    }
}
|
|
26
|
+
/**
 * Exchange a backend Authorization header for PowerSync credentials by
 * GETting the token endpoint. Accepts the token/endpoint either at the
 * payload root or nested under `data`; falls back to the configured
 * endpoint when the response does not supply one.
 */
export async function fetchCredentialsWithAuthorization({ authorization, endpoint, credentialsUrl, signal, }) {
    // Validate inputs up front, in the same order as before.
    const requirements = [
        [authorization, 'an authorization token'],
        [endpoint, 'an endpoint'],
        [credentialsUrl, 'a token endpoint'],
    ];
    for (const [present, label] of requirements) {
        if (!present) {
            throw new Error(`PowerSync credential exchange requires ${label}`);
        }
    }
    const response = await fetch(credentialsUrl, {
        method: 'GET',
        headers: {
            Authorization: authorization,
            'Content-Type': 'application/json',
        },
        signal,
    });
    if (!response.ok) {
        // Include the response body (best-effort) for diagnosability.
        const body = await response.text().catch(() => '');
        throw new Error(`Received ${response.status} from ${credentialsUrl}: ${body}`);
    }
    const payload = await response.json().catch(() => null);
    const token = payload?.token ?? payload?.data?.token ?? null;
    const resolvedEndpoint = payload?.endpoint ?? payload?.data?.endpoint ?? null;
    if (!token) {
        throw new Error('PowerSync credential response did not include a token');
    }
    return { endpoint: resolvedEndpoint ?? endpoint, token };
}
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import { PowerSyncDatabase } from '@powersync/node';
|
|
2
|
+
import type { AbstractPowerSyncDatabase, SyncStatusOptions } from '@powersync/common';
|
|
3
|
+
import type { ResolvedDinoxConfig } from '../config/types.js';
|
|
4
|
+
export type ConnectResult = {
|
|
5
|
+
db: PowerSyncDatabase;
|
|
6
|
+
dbPath: string;
|
|
7
|
+
stale: boolean;
|
|
8
|
+
};
|
|
9
|
+
export declare function getStatusSnapshot(db: AbstractPowerSyncDatabase): {
|
|
10
|
+
connected: boolean;
|
|
11
|
+
connecting: boolean;
|
|
12
|
+
hasSynced: boolean;
|
|
13
|
+
lastSyncedAt: string | null;
|
|
14
|
+
dataFlowStatus: {
|
|
15
|
+
downloading: boolean;
|
|
16
|
+
uploading: boolean;
|
|
17
|
+
downloadError?: {
|
|
18
|
+
name: string;
|
|
19
|
+
message: string;
|
|
20
|
+
};
|
|
21
|
+
uploadError?: {
|
|
22
|
+
name: string;
|
|
23
|
+
message: string;
|
|
24
|
+
};
|
|
25
|
+
};
|
|
26
|
+
raw: SyncStatusOptions;
|
|
27
|
+
};
|
|
28
|
+
export declare function openPowerSyncDatabase(): Promise<{
|
|
29
|
+
db: PowerSyncDatabase;
|
|
30
|
+
dbPath: string;
|
|
31
|
+
}>;
|
|
32
|
+
export declare function connectPowerSync(args: {
|
|
33
|
+
config: ResolvedDinoxConfig;
|
|
34
|
+
offline: boolean;
|
|
35
|
+
timeoutMs: number;
|
|
36
|
+
}): Promise<ConnectResult>;
|
|
37
|
+
export declare function waitForIdleDataFlow(db: AbstractPowerSyncDatabase, timeoutMs: number): Promise<boolean>;
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import { PowerSyncDatabase } from '@powersync/node';
|
|
3
|
+
import { getDataDir, getPowerSyncDbPath } from '../config/paths.js';
|
|
4
|
+
import { DinoxError } from '../utils/errors.js';
|
|
5
|
+
import { AppSchema } from './schema/index.js';
|
|
6
|
+
import { PowerSyncConnector, fetchCredentialsWithAuthorization } from './connector.js';
|
|
7
|
+
import { createPowerSyncUploadHandler } from './uploader.js';
|
|
8
|
+
// Create the data directory (private, 0700) if needed and return its path.
async function ensureDataDir() {
    const dataDir = getDataDir();
    await fs.mkdir(dataDir, { recursive: true, mode: 0o700 });
    return dataDir;
}
|
|
13
|
+
// An AbortSignal that fires after timeoutMs, plus a cancel() that disarms it.
function createTimeoutSignal(timeoutMs) {
    const abortController = new AbortController();
    const timer = setTimeout(() => {
        abortController.abort();
    }, timeoutMs);
    const cancel = () => {
        clearTimeout(timer);
    };
    return { signal: abortController.signal, cancel };
}
|
|
21
|
+
/**
 * Build a plain, serializable snapshot of the database's sync status.
 * Tolerates a missing/partial currentStatus by defaulting every field.
 */
export function getStatusSnapshot(db) {
    const raw = db.currentStatus?.toJSON?.() ?? {};
    const dataFlow = raw.dataFlow ?? {};
    // Normalize an error-ish value into a plain { name, message } record.
    const describeError = (err) => (err
        ? { name: err.name ?? 'Error', message: err.message ?? String(err) }
        : undefined);
    const syncedAt = raw.lastSyncedAt instanceof Date
        ? raw.lastSyncedAt.toISOString()
        : raw.lastSyncedAt ?? null;
    return {
        connected: Boolean(raw.connected),
        connecting: Boolean(raw.connecting),
        hasSynced: Boolean(raw.hasSynced),
        lastSyncedAt: syncedAt,
        dataFlowStatus: {
            downloading: Boolean(dataFlow.downloading),
            uploading: Boolean(dataFlow.uploading),
            downloadError: describeError(dataFlow.downloadError),
            uploadError: describeError(dataFlow.uploadError),
        },
        raw,
    };
}
|
|
43
|
+
/** Open (and initialize) the local PowerSync database, creating the data dir first. */
export async function openPowerSyncDatabase() {
    await ensureDataDir();
    const dbFilename = getPowerSyncDbPath();
    const database = new PowerSyncDatabase({
        schema: AppSchema,
        database: { dbFilename },
    });
    // Ensure initialization errors surface early.
    await database.init();
    return { db: database, dbPath: dbFilename };
}
|
|
56
|
+
// Fail fast with a login hint when no authorization is configured.
function assertConfigured(config) {
    const { authorization } = config.auth;
    if (!authorization) {
        throw new DinoxError('Not logged in. Run: dino auth login <authorization>');
    }
}
|
|
61
|
+
/**
 * Open the local database and, unless offline, connect it to the PowerSync
 * service, waiting up to args.timeoutMs for the first sync.
 * Returns { db, dbPath, stale } where stale=true means local data may be
 * out of date (offline mode, or the first-sync wait timed out).
 */
export async function connectPowerSync(args) {
    const { db, dbPath } = await openPowerSyncDatabase();
    if (args.offline) {
        // Offline: serve whatever is on disk; mark it as potentially stale.
        return { db, dbPath, stale: true };
    }
    assertConfigured(args.config);
    // Uploads are only wired up when an upload base URL is configured.
    const uploader = args.config.powersync.uploadBaseUrl
        ? createPowerSyncUploadHandler({
            authorization: args.config.auth.authorization,
            uploadBaseUrl: args.config.powersync.uploadBaseUrl,
            uploadV4Path: args.config.powersync.uploadV4Path,
            uploadV2Path: args.config.powersync.uploadV2Path,
        })
        : undefined;
    // Credentials are fetched lazily on each (re)connect by the connector.
    const connector = new PowerSyncConnector(async () => {
        return fetchCredentialsWithAuthorization({
            authorization: args.config.auth.authorization,
            endpoint: args.config.powersync.endpoint,
            credentialsUrl: args.config.powersync.tokenEndpoint,
        });
    }, uploader);
    await db.connect(connector);
    const { signal, cancel } = createTimeoutSignal(args.timeoutMs);
    try {
        // NOTE(review): the stale flag assumes waitForFirstSync RESOLVES when the
        // signal aborts; if the @powersync API rejects on abort instead, this
        // path throws and stale:true is never returned — confirm the API contract.
        await db.waitForFirstSync(signal);
        return { db, dbPath, stale: signal.aborted };
    }
    finally {
        cancel();
    }
}
|
|
92
|
+
/**
 * Wait until neither downloads nor uploads are in flight.
 * Returns true when idle was reached within timeoutMs; false on timeout
 * or any error from the status wait.
 */
export async function waitForIdleDataFlow(db, timeoutMs) {
    const { signal, cancel } = createTimeoutSignal(timeoutMs);
    const isIdle = (status) => {
        const flow = status.dataFlowStatus;
        return !flow.downloading && !flow.uploading;
    };
    try {
        await db.waitForStatus(isIdle, signal);
        return !signal.aborted;
    }
    catch {
        return false;
    }
    finally {
        cancel();
    }
}
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
import { Table } from '@powersync/common';
|
|
2
|
+
export declare const c_card_box: Table<{
|
|
3
|
+
id: import("@powersync/common").BaseColumnType<string | null>;
|
|
4
|
+
name: import("@powersync/common").BaseColumnType<string | null>;
|
|
5
|
+
type: import("@powersync/common").BaseColumnType<string | null>;
|
|
6
|
+
cover_url: import("@powersync/common").BaseColumnType<string | null>;
|
|
7
|
+
prompt: import("@powersync/common").BaseColumnType<string | null>;
|
|
8
|
+
headers: import("@powersync/common").BaseColumnType<string | null>;
|
|
9
|
+
priority: import("@powersync/common").BaseColumnType<number | null>;
|
|
10
|
+
created_at: import("@powersync/common").BaseColumnType<string | null>;
|
|
11
|
+
updated_at: import("@powersync/common").BaseColumnType<string | null>;
|
|
12
|
+
extra_data: import("@powersync/common").BaseColumnType<string | null>;
|
|
13
|
+
user_no: import("@powersync/common").BaseColumnType<string | null>;
|
|
14
|
+
is_del: import("@powersync/common").BaseColumnType<number | null>;
|
|
15
|
+
template_id: import("@powersync/common").BaseColumnType<string | null>;
|
|
16
|
+
}>;
|
|
17
|
+
export declare const c_card_note: Table<{
|
|
18
|
+
id: import("@powersync/common").BaseColumnType<string | null>;
|
|
19
|
+
box_id: import("@powersync/common").BaseColumnType<string | null>;
|
|
20
|
+
content: import("@powersync/common").BaseColumnType<string | null>;
|
|
21
|
+
created_at: import("@powersync/common").BaseColumnType<string | null>;
|
|
22
|
+
updated_at: import("@powersync/common").BaseColumnType<string | null>;
|
|
23
|
+
extra_data: import("@powersync/common").BaseColumnType<string | null>;
|
|
24
|
+
user_no: import("@powersync/common").BaseColumnType<string | null>;
|
|
25
|
+
is_del: import("@powersync/common").BaseColumnType<number | null>;
|
|
26
|
+
origin_note_id: import("@powersync/common").BaseColumnType<string | null>;
|
|
27
|
+
origin_type: import("@powersync/common").BaseColumnType<string | null>;
|
|
28
|
+
output_type: import("@powersync/common").BaseColumnType<string | null>;
|
|
29
|
+
is_array: import("@powersync/common").BaseColumnType<number | null>;
|
|
30
|
+
}>;
|
|
31
|
+
export declare const c_cmd: Table<{
|
|
32
|
+
id: import("@powersync/common").BaseColumnType<string | null>;
|
|
33
|
+
zone: import("@powersync/common").BaseColumnType<string | null>;
|
|
34
|
+
type: import("@powersync/common").BaseColumnType<string | null>;
|
|
35
|
+
cmd: import("@powersync/common").BaseColumnType<string | null>;
|
|
36
|
+
name: import("@powersync/common").BaseColumnType<string | null>;
|
|
37
|
+
priority: import("@powersync/common").BaseColumnType<number | null>;
|
|
38
|
+
is_show: import("@powersync/common").BaseColumnType<number | null>;
|
|
39
|
+
description: import("@powersync/common").BaseColumnType<string | null>;
|
|
40
|
+
extra_data: import("@powersync/common").BaseColumnType<string | null>;
|
|
41
|
+
user_id: import("@powersync/common").BaseColumnType<string | null>;
|
|
42
|
+
insert_at: import("@powersync/common").BaseColumnType<string | null>;
|
|
43
|
+
update_at: import("@powersync/common").BaseColumnType<string | null>;
|
|
44
|
+
is_del: import("@powersync/common").BaseColumnType<number | null>;
|
|
45
|
+
template_id: import("@powersync/common").BaseColumnType<string | null>;
|
|
46
|
+
meta_data: import("@powersync/common").BaseColumnType<string | null>;
|
|
47
|
+
output: import("@powersync/common").BaseColumnType<string | null>;
|
|
48
|
+
output_param: import("@powersync/common").BaseColumnType<string | null>;
|
|
49
|
+
category: import("@powersync/common").BaseColumnType<string | null>;
|
|
50
|
+
}>;
|
|
51
|
+
export declare const c_note_template: Table<{
|
|
52
|
+
id: import("@powersync/common").BaseColumnType<string | null>;
|
|
53
|
+
content: import("@powersync/common").BaseColumnType<string | null>;
|
|
54
|
+
name: import("@powersync/common").BaseColumnType<string | null>;
|
|
55
|
+
priority: import("@powersync/common").BaseColumnType<number | null>;
|
|
56
|
+
is_show: import("@powersync/common").BaseColumnType<number | null>;
|
|
57
|
+
extra_data: import("@powersync/common").BaseColumnType<string | null>;
|
|
58
|
+
user_id: import("@powersync/common").BaseColumnType<string | null>;
|
|
59
|
+
insert_at: import("@powersync/common").BaseColumnType<string | null>;
|
|
60
|
+
update_at: import("@powersync/common").BaseColumnType<string | null>;
|
|
61
|
+
is_del: import("@powersync/common").BaseColumnType<number | null>;
|
|
62
|
+
}>;
|
|
63
|
+
export declare const c_zettel_box: Table<{
|
|
64
|
+
id: import("@powersync/common").BaseColumnType<string | null>;
|
|
65
|
+
name: import("@powersync/common").BaseColumnType<string | null>;
|
|
66
|
+
description: import("@powersync/common").BaseColumnType<string | null>;
|
|
67
|
+
color: import("@powersync/common").BaseColumnType<string | null>;
|
|
68
|
+
priority: import("@powersync/common").BaseColumnType<number | null>;
|
|
69
|
+
is_show: import("@powersync/common").BaseColumnType<number | null>;
|
|
70
|
+
cover_url: import("@powersync/common").BaseColumnType<string | null>;
|
|
71
|
+
created_at: import("@powersync/common").BaseColumnType<string | null>;
|
|
72
|
+
updated_at: import("@powersync/common").BaseColumnType<string | null>;
|
|
73
|
+
extra_data: import("@powersync/common").BaseColumnType<string | null>;
|
|
74
|
+
user_no: import("@powersync/common").BaseColumnType<string | null>;
|
|
75
|
+
is_del: import("@powersync/common").BaseColumnType<number | null>;
|
|
76
|
+
}>;
|