@pantheon.ai/agents 0.3.4 → 0.3.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -0
- package/dist/index.js +28 -35
- package/package.json +1 -1
- package/dist/token-B1_fh4ov.js +0 -50
- package/dist/token-error-B9tnlSZq.js +0 -72
- package/dist/token-util-TO_8V0Wr.js +0 -7
- package/dist/token-util-rhrS-6qe.js +0 -356
package/README.md
CHANGED
|
@@ -172,6 +172,12 @@ pantheon-agents gen-migration-sql --provider tidb --from 0.3.0
|
|
|
172
172
|
pantheon-agents delete-task <agent-name> <task-id>
|
|
173
173
|
```
|
|
174
174
|
|
|
175
|
+
## Task Dependencies & Retries
|
|
176
|
+
|
|
177
|
+
- A dependent task (with `parent_task_id`) only starts after its parent reaches `completed`.
|
|
178
|
+
- If the parent is `failed`/`canceled`, the child stays `pending` (so the parent can be retried).
|
|
179
|
+
- If the parent is permanently failing, manually `retry-task` / `cancel-task` the child / `kill` the subtree.
|
|
180
|
+
|
|
175
181
|
For full options of any command:
|
|
176
182
|
|
|
177
183
|
```bash
|
package/dist/index.js
CHANGED
|
@@ -399,7 +399,7 @@ var require_cli_options = /* @__PURE__ */ __commonJSMin(((exports, module) => {
|
|
|
399
399
|
|
|
400
400
|
//#endregion
|
|
401
401
|
//#region package.json
|
|
402
|
-
var version = "0.3.
|
|
402
|
+
var version = "0.3.5";
|
|
403
403
|
|
|
404
404
|
//#endregion
|
|
405
405
|
//#region src/db/db9.ts
|
|
@@ -1177,9 +1177,9 @@ const snapSchema = z.object({
|
|
|
1177
1177
|
step_index: z.number(),
|
|
1178
1178
|
event_stream_id: z.string().nullable(),
|
|
1179
1179
|
tool_use_id: z.string().nullable(),
|
|
1180
|
-
manifest_status: z.string().nullable(),
|
|
1181
|
-
manifest_task_id: z.string().nullable(),
|
|
1182
|
-
manifest_snap_id: z.string().nullable()
|
|
1180
|
+
manifest_status: z.string().nullable().optional(),
|
|
1181
|
+
manifest_task_id: z.string().nullable().optional(),
|
|
1182
|
+
manifest_snap_id: z.string().nullable().optional()
|
|
1183
1183
|
});
|
|
1184
1184
|
const snapDetailsSchema = snapSchema.extend({
|
|
1185
1185
|
output: z.string().nullable(),
|
|
@@ -1207,7 +1207,7 @@ const branchSchema = z.object({
|
|
|
1207
1207
|
updated_at: zodJsonDate,
|
|
1208
1208
|
latest_snap_id: z.string().nullable(),
|
|
1209
1209
|
latest_snap: snapSchema.nullable(),
|
|
1210
|
-
manifest: branchManifestSchema.nullable()
|
|
1210
|
+
manifest: branchManifestSchema.nullable().optional()
|
|
1211
1211
|
});
|
|
1212
1212
|
const branchSnapsSchema = z.object({
|
|
1213
1213
|
branch_id: z.string(),
|
|
@@ -2131,7 +2131,7 @@ async function killPantheonBranchExecution({ projectId, branchId }) {
|
|
|
2131
2131
|
const DEFAULT_CONCURRENCY = 1;
|
|
2132
2132
|
const DEFAULT_MAX_RETRY_ATTEMPTS$1 = 3;
|
|
2133
2133
|
const DEFAULT_RETRY_BACKOFF_SECONDS$1 = 30;
|
|
2134
|
-
const AUTO_RECONFIG_PROMPT = "
|
|
2134
|
+
const AUTO_RECONFIG_PROMPT = "Reconfigure this branch to match the latest agent config.";
|
|
2135
2135
|
function getTaskAttemptCount(task) {
|
|
2136
2136
|
if (Number.isInteger(task.attempt_count) && task.attempt_count > 0) return task.attempt_count;
|
|
2137
2137
|
return 1;
|
|
@@ -2321,24 +2321,14 @@ async function getPendingChildren(provider, projectId, parentTaskId) {
|
|
|
2321
2321
|
status: ["pending"]
|
|
2322
2322
|
})).filter((item) => item.parent_task_id === parentTaskId);
|
|
2323
2323
|
}
|
|
2324
|
-
async function
|
|
2325
|
-
|
|
2326
|
-
|
|
2327
|
-
|
|
2328
|
-
|
|
2329
|
-
task: options.task.task,
|
|
2330
|
-
base_branch_id: options.task.base_branch_id,
|
|
2331
|
-
parent_task_id: options.nextParentTaskId,
|
|
2332
|
-
skipConfigTaskAutoParent: true
|
|
2333
|
-
});
|
|
2334
|
-
for (const descendant of pendingDescendants) await options.provider.updateTask({
|
|
2335
|
-
...descendant,
|
|
2336
|
-
parent_task_id: recreated.id
|
|
2337
|
-
});
|
|
2338
|
-
return {
|
|
2339
|
-
recreated,
|
|
2340
|
-
rewiredCount: pendingDescendants.length
|
|
2324
|
+
async function rebasePendingTaskToNewParent(options) {
|
|
2325
|
+
if (options.task.parent_task_id === options.nextParentTaskId) return { rebased: options.task };
|
|
2326
|
+
const rebased = {
|
|
2327
|
+
...options.task,
|
|
2328
|
+
parent_task_id: options.nextParentTaskId
|
|
2341
2329
|
};
|
|
2330
|
+
await options.provider.updateTask(rebased);
|
|
2331
|
+
return { rebased };
|
|
2342
2332
|
}
|
|
2343
2333
|
async function handleOutdatedCompletedTask(provider, task, currentVersion, expectedVersion, logger) {
|
|
2344
2334
|
const expectedVersionResolved = Math.max(expectedVersion, currentVersion + 1, 1);
|
|
@@ -2357,12 +2347,12 @@ async function handleOutdatedCompletedTask(provider, task, currentVersion, expec
|
|
|
2357
2347
|
steps
|
|
2358
2348
|
});
|
|
2359
2349
|
for (const child of pendingDirectChildren) {
|
|
2360
|
-
const {
|
|
2350
|
+
const { rebased } = await rebasePendingTaskToNewParent({
|
|
2361
2351
|
provider,
|
|
2362
2352
|
task: child,
|
|
2363
2353
|
nextParentTaskId
|
|
2364
2354
|
});
|
|
2365
|
-
logger.info("Rebase outdated task %s
|
|
2355
|
+
logger.info("Rebase outdated task %s under new parent %s (with %d reconfig steps). Triggered by completion of task %s.", child.id, rebased.parent_task_id, steps.length, task.id);
|
|
2366
2356
|
}
|
|
2367
2357
|
}
|
|
2368
2358
|
async function startPendingTask(provider, task, config, logger) {
|
|
@@ -2483,7 +2473,7 @@ async function preparePendingTaskForStart(provider, task, config, logger) {
|
|
|
2483
2473
|
async function handleOutdatedParentAtStart(provider, task, parentTask, currentVersion, expectedVersion, logger) {
|
|
2484
2474
|
const steps = await getNormalizedReconfigSteps(provider, currentVersion, expectedVersion);
|
|
2485
2475
|
const workingBranchId = getTaskWorkingBranchId(parentTask);
|
|
2486
|
-
const {
|
|
2476
|
+
const { rebased } = await rebasePendingTaskToNewParent({
|
|
2487
2477
|
provider,
|
|
2488
2478
|
task,
|
|
2489
2479
|
nextParentTaskId: await createReconfigStepChain({
|
|
@@ -2494,7 +2484,7 @@ async function handleOutdatedParentAtStart(provider, task, parentTask, currentVe
|
|
|
2494
2484
|
steps
|
|
2495
2485
|
})
|
|
2496
2486
|
});
|
|
2497
|
-
logger.info("Rebase outdated task %s
|
|
2487
|
+
logger.info("Rebase outdated task %s under new parent %s (with %d reconfig steps). Triggered by starting of task %s.", task.id, rebased.parent_task_id, steps.length, task.id);
|
|
2498
2488
|
}
|
|
2499
2489
|
|
|
2500
2490
|
//#endregion
|
|
@@ -2879,8 +2869,8 @@ function resolveConfigRole(options) {
|
|
|
2879
2869
|
if (options.previousRole?.trim()) return { role: options.previousRole.trim() };
|
|
2880
2870
|
return { error: "--role is required for first-time config in a project." };
|
|
2881
2871
|
}
|
|
2882
|
-
async function configAgent(name, options) {
|
|
2883
|
-
const provider = createTaskListProvider(name, pino());
|
|
2872
|
+
async function configAgent(name, options, deps = {}) {
|
|
2873
|
+
const provider = (deps.createProvider ?? createTaskListProvider)(name, pino());
|
|
2884
2874
|
try {
|
|
2885
2875
|
const previousConfig = await provider.getAgentConfig(options.projectId);
|
|
2886
2876
|
const resolvedRoleResult = resolveConfigRole({
|
|
@@ -2926,21 +2916,24 @@ async function configAgent(name, options) {
|
|
|
2926
2916
|
const task = await provider.getTask(options.taskId);
|
|
2927
2917
|
if (!task) throw new Error(`Task ${options.taskId} not found.`);
|
|
2928
2918
|
if (task.project_id !== options.projectId) throw new Error(`Task ${options.taskId} belongs to project ${task.project_id}, expected ${options.projectId}.`);
|
|
2929
|
-
if (task.type !== "default" || task.parent_task_id == null || task.status !== "completed") throw new Error(`Cannot use task ${options.taskId} (type = ${task.type},
|
|
2919
|
+
if (task.type !== "default" || task.parent_task_id == null || task.status !== "completed") throw new Error(`Cannot use task ${options.taskId} (type = ${task.type}, status = ${task.status}) as configured base branch.`);
|
|
2930
2920
|
const configuredBaseBranchId = task.branch_id;
|
|
2921
|
+
const baseConfigVersion = previousConfig.config_version + 1;
|
|
2922
|
+
const nextConfigVersion = options.prompt ? baseConfigVersion + 1 : baseConfigVersion;
|
|
2931
2923
|
if (options.prompt) configTaskId = (await provider.createTask({
|
|
2932
2924
|
task: options.prompt,
|
|
2933
2925
|
project_id: options.projectId,
|
|
2934
2926
|
base_branch_id: configuredBaseBranchId,
|
|
2927
|
+
parent_task_id: task.id,
|
|
2935
2928
|
type: "reconfig",
|
|
2936
|
-
configVersion:
|
|
2929
|
+
configVersion: nextConfigVersion
|
|
2937
2930
|
})).id;
|
|
2938
2931
|
else configTaskId = options.taskId;
|
|
2939
2932
|
await provider.updateAgentConfig({
|
|
2940
2933
|
project_id: previousConfig.project_id,
|
|
2941
2934
|
base_branch_id: configuredBaseBranchId,
|
|
2942
2935
|
config_task_id: configTaskId,
|
|
2943
|
-
config_version:
|
|
2936
|
+
config_version: nextConfigVersion,
|
|
2944
2937
|
concurrency,
|
|
2945
2938
|
max_retry_attempts: maxRetryAttempts,
|
|
2946
2939
|
retry_backoff_seconds: retryBackoffSeconds,
|
|
@@ -2950,9 +2943,9 @@ async function configAgent(name, options) {
|
|
|
2950
2943
|
envs: resolvedEnvs,
|
|
2951
2944
|
prototype_url: resolvedPrototypeUrl
|
|
2952
2945
|
});
|
|
2953
|
-
|
|
2946
|
+
await provider.updateTask({
|
|
2954
2947
|
...task,
|
|
2955
|
-
config_version:
|
|
2948
|
+
config_version: baseConfigVersion
|
|
2956
2949
|
});
|
|
2957
2950
|
const tasks = await provider.getTasks({
|
|
2958
2951
|
parent_task_id: task.parent_task_id,
|
|
@@ -4031,7 +4024,7 @@ function createRetryTaskCommand(version, deps = {}) {
|
|
|
4031
4024
|
|
|
4032
4025
|
//#endregion
|
|
4033
4026
|
//#region src/cli/commands/skill-sh.ts
|
|
4034
|
-
const EMBEDDED_SKILL_MARKDOWN = "---\nname: pantheon-agents\ndescription: \"Pantheon Agents CLI Usage\"\n---\n\n> Pantheon is the project name of an internal VM environment management project. Do not guess any usage from the name.\n\n\n## Pantheon Agents CLI Usage\n\n### Concepts\n\n- agent: Each agent has a role (like `developer`) and several skills to use.\n- environment: The context of tools, codes or anything else for agents to run tasks. You don't need to know the details.\n- tasks: tasks for each agent to run, each task will create a new environment.\n\n#### Task structure\n\n- agent: The agent to executing the task\n- task (text): The task content\n- attempt: Attempt number (starts from 1 and increases on retries)\n- parent_task_id: The parent task id\n- status: `pending`, `running`, `completed`, `failed`, `canceled`\n- queued_at\n- started_at\n- ended_at\n- finished_at\n- canceled_at\n- cancel_reason\n- output: The output of the completed task.\n- error: The last known error message (failed task error is kept across retries).\n\n#### Other internal implementation details you don't need to know or use\n\n- project\n- branches\n\n### CLI Commands\n\n**You can only use the CLI commands listed below.**\n\n#### Setup a base environment for an agent:\n\n```shell\n# Use default setup script at first time configuration\npantheon-agents config 'Kris' \\\n --role 'developer' \\\n --skills pantheon-issue-resolve,pantheon-solution-design \\\n --max-retry-attempts 3 \\\n --retry-backoff-seconds 30\n\n# Add some custom prompts\npantheon-agents config 'Kris' \\\n --role 'developer' \\\n --skills pantheon-issue-resolve,pantheon-solution-design \\\n 'Update tools to the latest version.'\n```\n\n#### Update agent's environment\n\n##### Configure agent environment by prompt\n\n```shell\npantheon-agents config 'Kris' 'Install some tools'\n```\n\n##### Configure agent to a task result environment\n\npantheon-agents config 'Kris' --task-id 42\n\n#### Enqueue 
tasks\n\n```shell\npantheon-agents add-task 'Kris' 'Some awesome task'\n ```\n\nWhen adding tasks, cli will output the task id. You can add tasks with `--parent-task-id` to create a task hierarchy.\n\n```shell\npantheon-agents add-task 'Kris' 'Some awesome task' --parent-task-id 42\n```\n\n\n#### List tasks\n\n```shell\npantheon-agents show-tasks --json 'Kris'\n```\n\n\n#### Show single task info\n\n```shell\npantheon-agents get-task 'Kris' 42\n```\n\nReturns task info using this task structure (without internal project/branch fields):\n\n```json\n{\n \"agent\": \"Kris\",\n \"task\": \"Some awesome task\",\n \"attempt\": 1,\n \"parent_task_id\": \"41\",\n \"status\": \"completed\",\n \"queued_at\": \"2026-02-12T00:00:00.000Z\",\n \"started_at\": \"2026-02-12T00:00:05.000Z\",\n \"ended_at\": \"2026-02-12T00:05:00.000Z\",\n \"finished_at\": \"2026-02-12T00:05:00.000Z\",\n \"canceled_at\": null,\n \"cancel_reason\": null,\n \"output\": \"Task output\",\n \"error\": null\n}\n```\n\n#### Cancel task\n\n```shell\npantheon-agents cancel-task 'Kris' 42 'Reason'\n```\n\n#### Retry failed task\n\n```shell\n# Manual retry (immediate requeue and increases attempt)\npantheon-agents retry-task 'Kris' 42 'Reason'\n```\n\n#### Print embedded skill markdown\n\n```shell\npantheon-agents skill.sh\n```\n";
|
|
4027
|
+
const EMBEDDED_SKILL_MARKDOWN = "---\nname: pantheon-agents\ndescription: \"Pantheon Agents CLI Usage\"\n---\n\n> Pantheon is the project name of an internal VM environment management project. Do not guess any usage from the name.\n\n\n## Pantheon Agents CLI Usage\n\n### Concepts\n\n- agent: Each agent has a role (like `developer`) and several skills to use.\n- environment: The context of tools, codes or anything else for agents to run tasks. You don't need to know the details.\n- tasks: tasks for each agent to run, each task will create a new environment.\n\n#### Task structure\n\n- agent: The agent to executing the task\n- task (text): The task content\n- attempt: Attempt number (starts from 1 and increases on retries)\n- parent_task_id: The parent task id\n- status: `pending`, `running`, `completed`, `failed`, `canceled`\n- queued_at\n- started_at\n- ended_at\n- finished_at\n- canceled_at\n- cancel_reason\n- output: The output of the completed task.\n- error: The last known error message (failed task error is kept across retries).\n\n#### Dependency & retry behavior\n\n- A task with `parent_task_id` only starts after its parent reaches `completed`.\n- If the parent is `failed`/`canceled`, the child stays `pending` so the parent can be retried.\n- If the parent has exhausted retries or is permanently failing, manually `retry-task`, `cancel-task`, or `kill` the task subtree.\n\n#### Other internal implementation details you don't need to know or use\n\n- project\n- branches\n\n### CLI Commands\n\n**You can only use the CLI commands listed below.**\n\n#### Setup a base environment for an agent:\n\n```shell\n# Use default setup script at first time configuration\npantheon-agents config 'Kris' \\\n --role 'developer' \\\n --skills pantheon-issue-resolve,pantheon-solution-design \\\n --max-retry-attempts 3 \\\n --retry-backoff-seconds 30\n\n# Add some custom prompts\npantheon-agents config 'Kris' \\\n --role 'developer' \\\n --skills 
pantheon-issue-resolve,pantheon-solution-design \\\n 'Update tools to the latest version.'\n```\n\n#### Update agent's environment\n\n##### Configure agent environment by prompt\n\n```shell\npantheon-agents config 'Kris' 'Install some tools'\n```\n\n##### Configure agent to a task result environment\n\npantheon-agents config 'Kris' --task-id 42\n\n#### Enqueue tasks\n\n```shell\npantheon-agents add-task 'Kris' 'Some awesome task'\n ```\n\nWhen adding tasks, cli will output the task id. You can add tasks with `--parent-task-id` to create a task hierarchy.\n\n```shell\npantheon-agents add-task 'Kris' 'Some awesome task' --parent-task-id 42\n```\n\n\n#### List tasks\n\n```shell\npantheon-agents show-tasks --json 'Kris'\n```\n\n\n#### Show single task info\n\n```shell\npantheon-agents get-task 'Kris' 42\n```\n\nReturns task info using this task structure (without internal project/branch fields):\n\n```json\n{\n \"agent\": \"Kris\",\n \"task\": \"Some awesome task\",\n \"attempt\": 1,\n \"parent_task_id\": \"41\",\n \"status\": \"completed\",\n \"queued_at\": \"2026-02-12T00:00:00.000Z\",\n \"started_at\": \"2026-02-12T00:00:05.000Z\",\n \"ended_at\": \"2026-02-12T00:05:00.000Z\",\n \"finished_at\": \"2026-02-12T00:05:00.000Z\",\n \"canceled_at\": null,\n \"cancel_reason\": null,\n \"output\": \"Task output\",\n \"error\": null\n}\n```\n\n#### Cancel task\n\n```shell\npantheon-agents cancel-task 'Kris' 42 'Reason'\n```\n\n#### Retry failed task\n\n```shell\n# Manual retry (immediate requeue and increases attempt)\npantheon-agents retry-task 'Kris' 42 'Reason'\n```\n\n#### Print embedded skill markdown\n\n```shell\npantheon-agents skill.sh\n```\n";
|
|
4035
4028
|
function createSkillShCommand(version) {
|
|
4036
4029
|
return createCommand("skill.sh").version(version).description("Print embedded pantheon-agents skill markdown").action(() => {
|
|
4037
4030
|
process$1.stdout.write(EMBEDDED_SKILL_MARKDOWN);
|
package/package.json
CHANGED
package/dist/token-B1_fh4ov.js
DELETED
|
@@ -1,50 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node
|
|
2
|
-
import { n as __commonJSMin, t as require_token_error } from "./token-error-B9tnlSZq.js";
|
|
3
|
-
import { t as require_token_util } from "./token-util-rhrS-6qe.js";
|
|
4
|
-
|
|
5
|
-
//#region ../../node_modules/@vercel/oidc/dist/token.js
|
|
6
|
-
var require_token = /* @__PURE__ */ __commonJSMin(((exports, module) => {
|
|
7
|
-
var __defProp = Object.defineProperty;
|
|
8
|
-
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
9
|
-
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
10
|
-
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
11
|
-
var __export = (target, all) => {
|
|
12
|
-
for (var name in all) __defProp(target, name, {
|
|
13
|
-
get: all[name],
|
|
14
|
-
enumerable: true
|
|
15
|
-
});
|
|
16
|
-
};
|
|
17
|
-
var __copyProps = (to, from, except, desc) => {
|
|
18
|
-
if (from && typeof from === "object" || typeof from === "function") {
|
|
19
|
-
for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
|
|
20
|
-
get: () => from[key],
|
|
21
|
-
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
|
|
22
|
-
});
|
|
23
|
-
}
|
|
24
|
-
return to;
|
|
25
|
-
};
|
|
26
|
-
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
27
|
-
var token_exports = {};
|
|
28
|
-
__export(token_exports, { refreshToken: () => refreshToken });
|
|
29
|
-
module.exports = __toCommonJS(token_exports);
|
|
30
|
-
var import_token_error = require_token_error();
|
|
31
|
-
var import_token_util = require_token_util();
|
|
32
|
-
async function refreshToken() {
|
|
33
|
-
const { projectId, teamId } = (0, import_token_util.findProjectInfo)();
|
|
34
|
-
let maybeToken = (0, import_token_util.loadToken)(projectId);
|
|
35
|
-
if (!maybeToken || (0, import_token_util.isExpired)((0, import_token_util.getTokenPayload)(maybeToken.token))) {
|
|
36
|
-
const authToken = await (0, import_token_util.getVercelCliToken)();
|
|
37
|
-
if (!authToken) throw new import_token_error.VercelOidcTokenError("Failed to refresh OIDC token: Log in to Vercel CLI and link your project with `vc link`");
|
|
38
|
-
if (!projectId) throw new import_token_error.VercelOidcTokenError("Failed to refresh OIDC token: Try re-linking your project with `vc link`");
|
|
39
|
-
maybeToken = await (0, import_token_util.getVercelOidcToken)(authToken, projectId, teamId);
|
|
40
|
-
if (!maybeToken) throw new import_token_error.VercelOidcTokenError("Failed to refresh OIDC token");
|
|
41
|
-
(0, import_token_util.saveToken)(maybeToken, projectId);
|
|
42
|
-
}
|
|
43
|
-
process.env.VERCEL_OIDC_TOKEN = maybeToken.token;
|
|
44
|
-
}
|
|
45
|
-
}));
|
|
46
|
-
|
|
47
|
-
//#endregion
|
|
48
|
-
export default require_token();
|
|
49
|
-
|
|
50
|
-
export { };
|
|
@@ -1,72 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node
|
|
2
|
-
import { createRequire } from "node:module";
|
|
3
|
-
|
|
4
|
-
//#region \0rolldown/runtime.js
|
|
5
|
-
var __create = Object.create;
|
|
6
|
-
var __defProp = Object.defineProperty;
|
|
7
|
-
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
8
|
-
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
9
|
-
var __getProtoOf = Object.getPrototypeOf;
|
|
10
|
-
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
11
|
-
var __commonJSMin = (cb, mod) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports);
|
|
12
|
-
var __copyProps = (to, from, except, desc) => {
|
|
13
|
-
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
-
for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
|
|
15
|
-
key = keys[i];
|
|
16
|
-
if (!__hasOwnProp.call(to, key) && key !== except) {
|
|
17
|
-
__defProp(to, key, {
|
|
18
|
-
get: ((k) => from[k]).bind(null, key),
|
|
19
|
-
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
|
|
20
|
-
});
|
|
21
|
-
}
|
|
22
|
-
}
|
|
23
|
-
}
|
|
24
|
-
return to;
|
|
25
|
-
};
|
|
26
|
-
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
|
|
27
|
-
value: mod,
|
|
28
|
-
enumerable: true
|
|
29
|
-
}) : target, mod));
|
|
30
|
-
var __require = /* @__PURE__ */ createRequire(import.meta.url);
|
|
31
|
-
|
|
32
|
-
//#endregion
|
|
33
|
-
//#region ../../node_modules/@vercel/oidc/dist/token-error.js
|
|
34
|
-
var require_token_error = /* @__PURE__ */ __commonJSMin(((exports, module) => {
|
|
35
|
-
var __defProp = Object.defineProperty;
|
|
36
|
-
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
37
|
-
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
38
|
-
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
39
|
-
var __export = (target, all) => {
|
|
40
|
-
for (var name in all) __defProp(target, name, {
|
|
41
|
-
get: all[name],
|
|
42
|
-
enumerable: true
|
|
43
|
-
});
|
|
44
|
-
};
|
|
45
|
-
var __copyProps = (to, from, except, desc) => {
|
|
46
|
-
if (from && typeof from === "object" || typeof from === "function") {
|
|
47
|
-
for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
|
|
48
|
-
get: () => from[key],
|
|
49
|
-
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
|
|
50
|
-
});
|
|
51
|
-
}
|
|
52
|
-
return to;
|
|
53
|
-
};
|
|
54
|
-
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
55
|
-
var token_error_exports = {};
|
|
56
|
-
__export(token_error_exports, { VercelOidcTokenError: () => VercelOidcTokenError });
|
|
57
|
-
module.exports = __toCommonJS(token_error_exports);
|
|
58
|
-
var VercelOidcTokenError = class extends Error {
|
|
59
|
-
constructor(message, cause) {
|
|
60
|
-
super(message);
|
|
61
|
-
this.name = "VercelOidcTokenError";
|
|
62
|
-
this.cause = cause;
|
|
63
|
-
}
|
|
64
|
-
toString() {
|
|
65
|
-
if (this.cause) return `${this.name}: ${this.message}: ${this.cause}`;
|
|
66
|
-
return `${this.name}: ${this.message}`;
|
|
67
|
-
}
|
|
68
|
-
};
|
|
69
|
-
}));
|
|
70
|
-
|
|
71
|
-
//#endregion
|
|
72
|
-
export { __toESM as i, __commonJSMin as n, __require as r, require_token_error as t };
|
|
@@ -1,356 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node
|
|
2
|
-
import { n as __commonJSMin, r as __require, t as require_token_error } from "./token-error-B9tnlSZq.js";
|
|
3
|
-
|
|
4
|
-
//#region ../../node_modules/@vercel/oidc/dist/token-io.js
|
|
5
|
-
var require_token_io = /* @__PURE__ */ __commonJSMin(((exports, module) => {
|
|
6
|
-
var __create = Object.create;
|
|
7
|
-
var __defProp = Object.defineProperty;
|
|
8
|
-
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
9
|
-
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
10
|
-
var __getProtoOf = Object.getPrototypeOf;
|
|
11
|
-
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
12
|
-
var __export = (target, all) => {
|
|
13
|
-
for (var name in all) __defProp(target, name, {
|
|
14
|
-
get: all[name],
|
|
15
|
-
enumerable: true
|
|
16
|
-
});
|
|
17
|
-
};
|
|
18
|
-
var __copyProps = (to, from, except, desc) => {
|
|
19
|
-
if (from && typeof from === "object" || typeof from === "function") {
|
|
20
|
-
for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
|
|
21
|
-
get: () => from[key],
|
|
22
|
-
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
|
|
23
|
-
});
|
|
24
|
-
}
|
|
25
|
-
return to;
|
|
26
|
-
};
|
|
27
|
-
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
|
|
28
|
-
value: mod,
|
|
29
|
-
enumerable: true
|
|
30
|
-
}) : target, mod));
|
|
31
|
-
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
32
|
-
var token_io_exports = {};
|
|
33
|
-
__export(token_io_exports, {
|
|
34
|
-
findRootDir: () => findRootDir,
|
|
35
|
-
getUserDataDir: () => getUserDataDir
|
|
36
|
-
});
|
|
37
|
-
module.exports = __toCommonJS(token_io_exports);
|
|
38
|
-
var import_path = __toESM(__require("path"));
|
|
39
|
-
var import_fs = __toESM(__require("fs"));
|
|
40
|
-
var import_os$1 = __toESM(__require("os"));
|
|
41
|
-
var import_token_error = require_token_error();
|
|
42
|
-
function findRootDir() {
|
|
43
|
-
try {
|
|
44
|
-
let dir = process.cwd();
|
|
45
|
-
while (dir !== import_path.default.dirname(dir)) {
|
|
46
|
-
const pkgPath = import_path.default.join(dir, ".vercel");
|
|
47
|
-
if (import_fs.default.existsSync(pkgPath)) return dir;
|
|
48
|
-
dir = import_path.default.dirname(dir);
|
|
49
|
-
}
|
|
50
|
-
} catch (e) {
|
|
51
|
-
throw new import_token_error.VercelOidcTokenError("Token refresh only supported in node server environments");
|
|
52
|
-
}
|
|
53
|
-
return null;
|
|
54
|
-
}
|
|
55
|
-
function getUserDataDir() {
|
|
56
|
-
if (process.env.XDG_DATA_HOME) return process.env.XDG_DATA_HOME;
|
|
57
|
-
switch (import_os$1.default.platform()) {
|
|
58
|
-
case "darwin": return import_path.default.join(import_os$1.default.homedir(), "Library/Application Support");
|
|
59
|
-
case "linux": return import_path.default.join(import_os$1.default.homedir(), ".local/share");
|
|
60
|
-
case "win32":
|
|
61
|
-
if (process.env.LOCALAPPDATA) return process.env.LOCALAPPDATA;
|
|
62
|
-
return null;
|
|
63
|
-
default: return null;
|
|
64
|
-
}
|
|
65
|
-
}
|
|
66
|
-
}));
|
|
67
|
-
|
|
68
|
-
//#endregion
|
|
69
|
-
//#region ../../node_modules/@vercel/oidc/dist/auth-config.js
|
|
70
|
-
var require_auth_config = /* @__PURE__ */ __commonJSMin(((exports, module) => {
|
|
71
|
-
var __create = Object.create;
|
|
72
|
-
var __defProp = Object.defineProperty;
|
|
73
|
-
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
74
|
-
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
75
|
-
var __getProtoOf = Object.getPrototypeOf;
|
|
76
|
-
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
77
|
-
var __export = (target, all) => {
|
|
78
|
-
for (var name in all) __defProp(target, name, {
|
|
79
|
-
get: all[name],
|
|
80
|
-
enumerable: true
|
|
81
|
-
});
|
|
82
|
-
};
|
|
83
|
-
var __copyProps = (to, from, except, desc) => {
|
|
84
|
-
if (from && typeof from === "object" || typeof from === "function") {
|
|
85
|
-
for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
|
|
86
|
-
get: () => from[key],
|
|
87
|
-
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
|
|
88
|
-
});
|
|
89
|
-
}
|
|
90
|
-
return to;
|
|
91
|
-
};
|
|
92
|
-
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
|
|
93
|
-
value: mod,
|
|
94
|
-
enumerable: true
|
|
95
|
-
}) : target, mod));
|
|
96
|
-
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
97
|
-
var auth_config_exports = {};
|
|
98
|
-
__export(auth_config_exports, {
|
|
99
|
-
isValidAccessToken: () => isValidAccessToken,
|
|
100
|
-
readAuthConfig: () => readAuthConfig,
|
|
101
|
-
writeAuthConfig: () => writeAuthConfig
|
|
102
|
-
});
|
|
103
|
-
module.exports = __toCommonJS(auth_config_exports);
|
|
104
|
-
var fs$1 = __toESM(__require("fs"));
|
|
105
|
-
var path$1 = __toESM(__require("path"));
|
|
106
|
-
var import_token_util = require_token_util();
|
|
107
|
-
function getAuthConfigPath() {
|
|
108
|
-
const dataDir = (0, import_token_util.getVercelDataDir)();
|
|
109
|
-
if (!dataDir) throw new Error(`Unable to find Vercel CLI data directory. Your platform: ${process.platform}. Supported: darwin, linux, win32.`);
|
|
110
|
-
return path$1.join(dataDir, "auth.json");
|
|
111
|
-
}
|
|
112
|
-
function readAuthConfig() {
|
|
113
|
-
try {
|
|
114
|
-
const authPath = getAuthConfigPath();
|
|
115
|
-
if (!fs$1.existsSync(authPath)) return null;
|
|
116
|
-
const content = fs$1.readFileSync(authPath, "utf8");
|
|
117
|
-
if (!content) return null;
|
|
118
|
-
return JSON.parse(content);
|
|
119
|
-
} catch (error) {
|
|
120
|
-
return null;
|
|
121
|
-
}
|
|
122
|
-
}
|
|
123
|
-
function writeAuthConfig(config) {
|
|
124
|
-
const authPath = getAuthConfigPath();
|
|
125
|
-
const authDir = path$1.dirname(authPath);
|
|
126
|
-
if (!fs$1.existsSync(authDir)) fs$1.mkdirSync(authDir, {
|
|
127
|
-
mode: 504,
|
|
128
|
-
recursive: true
|
|
129
|
-
});
|
|
130
|
-
fs$1.writeFileSync(authPath, JSON.stringify(config, null, 2), { mode: 384 });
|
|
131
|
-
}
|
|
132
|
-
function isValidAccessToken(authConfig) {
|
|
133
|
-
if (!authConfig.token) return false;
|
|
134
|
-
if (typeof authConfig.expiresAt !== "number") return true;
|
|
135
|
-
const nowInSeconds = Math.floor(Date.now() / 1e3);
|
|
136
|
-
return authConfig.expiresAt >= nowInSeconds;
|
|
137
|
-
}
|
|
138
|
-
}));
|
|
139
|
-
|
|
140
|
-
//#endregion
|
|
141
|
-
//#region ../../node_modules/@vercel/oidc/dist/oauth.js
|
|
142
|
-
// Lazily-evaluated CommonJS wrapper for the bundled copy of
// ../../node_modules/@vercel/oidc/dist/oauth.js. Exposes
// `refreshTokenRequest` and `processTokenResponse` for refreshing a
// Vercel CLI OAuth access token via the refresh_token grant.
var require_oauth = /* @__PURE__ */ __commonJSMin(((exports, module) => {
	// --- bundler-generated CJS/ESM interop shims (define getter-based exports) ---
	var __defProp = Object.defineProperty;
	var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
	var __getOwnPropNames = Object.getOwnPropertyNames;
	var __hasOwnProp = Object.prototype.hasOwnProperty;
	// Define each export as an enumerable getter on `target`.
	var __export = (target, all) => {
		for (var name in all) __defProp(target, name, {
			get: all[name],
			enumerable: true
		});
	};
	// Copy own properties from `from` onto `to` as live getters, skipping
	// `except` and anything `to` already has; preserves enumerability.
	var __copyProps = (to, from, except, desc) => {
		if (from && typeof from === "object" || typeof from === "function") {
			for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
				get: () => from[key],
				enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
			});
		}
		return to;
	};
	var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
	var oauth_exports = {};
	__export(oauth_exports, {
		processTokenResponse: () => processTokenResponse,
		refreshTokenRequest: () => refreshTokenRequest
	});
	module.exports = __toCommonJS(oauth_exports);
	var import_os = __require("os");
	// OAuth issuer and the public client id used by the Vercel CLI.
	const VERCEL_ISSUER = "https://vercel.com";
	const VERCEL_CLI_CLIENT_ID = "cl_HYyOPBNtFMfHhaUn9L4QPfTZz6TP47bp";
	// Identifies this client on every request (node version, platform, arch, host).
	const userAgent = `@vercel/oidc node-${process.version} ${(0, import_os.platform)()} (${(0, import_os.arch)()}) ${(0, import_os.hostname)()}`;
	// Module-level cache: the discovered token endpoint is fetched at most once
	// per process (no expiry/invalidation).
	let _tokenEndpoint = null;
	// Resolve the OAuth token endpoint via OIDC discovery, caching the result.
	// Throws on a non-2xx discovery response or a malformed metadata document.
	async function getTokenEndpoint() {
		if (_tokenEndpoint) return _tokenEndpoint;
		const discoveryUrl = `${VERCEL_ISSUER}/.well-known/openid-configuration`;
		const response = await fetch(discoveryUrl, { headers: { "user-agent": userAgent } });
		if (!response.ok) throw new Error("Failed to discover OAuth endpoints");
		const metadata = await response.json();
		if (!metadata || typeof metadata.token_endpoint !== "string") throw new Error("Invalid OAuth discovery response");
		const endpoint = metadata.token_endpoint;
		_tokenEndpoint = endpoint;
		return endpoint;
	}
	// POST a refresh_token grant to the token endpoint. `options` is spread into
	// the form body (callers pass { refresh_token }), so it can override
	// client_id/grant_type. Returns the raw fetch Response; does not inspect it.
	async function refreshTokenRequest(options) {
		const tokenEndpoint = await getTokenEndpoint();
		return await fetch(tokenEndpoint, {
			method: "POST",
			headers: {
				"Content-Type": "application/x-www-form-urlencoded",
				"user-agent": userAgent
			},
			body: new URLSearchParams({
				client_id: VERCEL_CLI_CLIENT_ID,
				grant_type: "refresh_token",
				...options
			})
		});
	}
	// Validate a token-endpoint Response and return a [error, tokens] pair
	// (Go-style tuple): [Error] on failure, [null, json] on success. Success
	// requires a string access_token, token_type exactly "Bearer", and a
	// numeric expires_in.
	async function processTokenResponse(response) {
		const json = await response.json();
		if (!response.ok) {
			// Prefer the server-reported OAuth `error` code when present.
			const errorMsg = typeof json === "object" && json && "error" in json ? String(json.error) : "Token refresh failed";
			return [new Error(errorMsg)];
		}
		if (typeof json !== "object" || json === null) return [/* @__PURE__ */ new Error("Invalid token response")];
		if (typeof json.access_token !== "string") return [/* @__PURE__ */ new Error("Missing access_token in response")];
		if (json.token_type !== "Bearer") return [/* @__PURE__ */ new Error("Invalid token_type in response")];
		if (typeof json.expires_in !== "number") return [/* @__PURE__ */ new Error("Missing expires_in in response")];
		return [null, json];
	}
}));
|
|
213
|
-
|
|
214
|
-
//#endregion
|
|
215
|
-
//#region ../../node_modules/@vercel/oidc/dist/token-util.js
|
|
216
|
-
// Lazily-evaluated CommonJS wrapper for the bundled copy of
// ../../node_modules/@vercel/oidc/dist/token-util.js. Handles locating the
// Vercel data directory, refreshing the CLI access token, fetching/persisting
// project OIDC tokens on disk, and decoding JWT payloads.
var require_token_util = /* @__PURE__ */ __commonJSMin(((exports, module) => {
	// --- bundler-generated CJS/ESM interop shims ---
	var __create = Object.create;
	var __defProp = Object.defineProperty;
	var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
	var __getOwnPropNames = Object.getOwnPropertyNames;
	var __getProtoOf = Object.getPrototypeOf;
	var __hasOwnProp = Object.prototype.hasOwnProperty;
	// Define each export as an enumerable getter on `target`.
	var __export = (target, all) => {
		for (var name in all) __defProp(target, name, {
			get: all[name],
			enumerable: true
		});
	};
	// Copy own properties from `from` onto `to` as live getters, skipping
	// `except` and anything `to` already has; preserves enumerability.
	var __copyProps = (to, from, except, desc) => {
		if (from && typeof from === "object" || typeof from === "function") {
			for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
				get: () => from[key],
				enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
			});
		}
		return to;
	};
	// Wrap a CJS module as an ESM namespace object (adds a `default` binding
	// unless the module already self-identifies as ESM).
	var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
		value: mod,
		enumerable: true
	}) : target, mod));
	var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
	var token_util_exports = {};
	__export(token_util_exports, {
		assertVercelOidcTokenResponse: () => assertVercelOidcTokenResponse,
		findProjectInfo: () => findProjectInfo,
		getTokenPayload: () => getTokenPayload,
		getVercelCliToken: () => getVercelCliToken,
		getVercelDataDir: () => getVercelDataDir,
		getVercelOidcToken: () => getVercelOidcToken,
		isExpired: () => isExpired,
		loadToken: () => loadToken,
		saveToken: () => saveToken
	});
	module.exports = __toCommonJS(token_util_exports);
	var path = __toESM(__require("path"));
	var fs = __toESM(__require("fs"));
	// Sibling bundled modules: error class, fs/path discovery helpers,
	// auth.json read/write, and the OAuth refresh client above.
	var import_token_error = require_token_error();
	var import_token_io = require_token_io();
	var import_auth_config = require_auth_config();
	var import_oauth = require_oauth();
	// Path of the Vercel CLI's per-user data directory, or null when the
	// platform user-data dir cannot be determined.
	function getVercelDataDir() {
		const vercelFolder = "com.vercel.cli";
		const dataDir = (0, import_token_io.getUserDataDir)();
		if (!dataDir) return null;
		return path.join(dataDir, vercelFolder);
	}
	// Return a valid Vercel CLI access token, refreshing it via the OAuth
	// refresh_token grant when expired. Returns null when no usable token can
	// be obtained. NOTE(review): every failure path (no refresh token, refresh
	// error, thrown exception) clears auth.json via writeAuthConfig({}),
	// effectively logging the user out — confirm that is intentional for
	// transient network failures as well.
	async function getVercelCliToken() {
		const authConfig = (0, import_auth_config.readAuthConfig)();
		if (!authConfig) return null;
		// Fast path: stored token exists and has not expired.
		if ((0, import_auth_config.isValidAccessToken)(authConfig)) return authConfig.token || null;
		if (!authConfig.refreshToken) {
			(0, import_auth_config.writeAuthConfig)({});
			return null;
		}
		try {
			const tokenResponse = await (0, import_oauth.refreshTokenRequest)({ refresh_token: authConfig.refreshToken });
			const [tokensError, tokens] = await (0, import_oauth.processTokenResponse)(tokenResponse);
			if (tokensError || !tokens) {
				(0, import_auth_config.writeAuthConfig)({});
				return null;
			}
			// Persist the new token; expiresAt is stored in Unix seconds.
			const updatedConfig = {
				token: tokens.access_token,
				expiresAt: Math.floor(Date.now() / 1e3) + tokens.expires_in
			};
			// Keep the rotated refresh token when the server issued one.
			if (tokens.refresh_token) updatedConfig.refreshToken = tokens.refresh_token;
			(0, import_auth_config.writeAuthConfig)(updatedConfig);
			return updatedConfig.token ?? null;
		} catch (error) {
			(0, import_auth_config.writeAuthConfig)({});
			return null;
		}
	}
	// Request a fresh OIDC token for a project from the Vercel API.
	// Throws VercelOidcTokenError on a non-2xx response; validates the JSON
	// body shape before returning it.
	async function getVercelOidcToken(authToken, projectId, teamId) {
		const url = `https://api.vercel.com/v1/projects/${projectId}/token?source=vercel-oidc-refresh${teamId ? `&teamId=${teamId}` : ""}`;
		const res = await fetch(url, {
			method: "POST",
			headers: { Authorization: `Bearer ${authToken}` }
		});
		if (!res.ok) throw new import_token_error.VercelOidcTokenError(`Failed to refresh OIDC token: ${res.statusText}`);
		const tokenRes = await res.json();
		assertVercelOidcTokenResponse(tokenRes);
		return tokenRes;
	}
	// Assert that `res` is an object with a string `token` property; throws
	// TypeError otherwise. Used both for API responses and tokens loaded from disk.
	function assertVercelOidcTokenResponse(res) {
		if (!res || typeof res !== "object") throw new TypeError("Vercel OIDC token is malformed. Expected an object. Please run `vc env pull` and try again");
		if (!("token" in res) || typeof res.token !== "string") throw new TypeError("Vercel OIDC token is malformed. Expected a string-valued token property. Please run `vc env pull` and try again");
	}
	// Read { projectId, teamId } from the linked project's .vercel/project.json.
	// Throws when the project root or project.json cannot be found.
	// NOTE(review): the validation uses `&&`, so it only throws when BOTH
	// projectId and orgId are non-strings; a missing projectId alone passes
	// through despite the error message. Looks like it was meant to be `||` —
	// left as-is since this is vendored @vercel/oidc code; confirm upstream.
	function findProjectInfo() {
		const dir = (0, import_token_io.findRootDir)();
		if (!dir) throw new import_token_error.VercelOidcTokenError("Unable to find project root directory. Have you linked your project with `vc link?`");
		const prjPath = path.join(dir, ".vercel", "project.json");
		if (!fs.existsSync(prjPath)) throw new import_token_error.VercelOidcTokenError("project.json not found, have you linked your project with `vc link?`");
		const prj = JSON.parse(fs.readFileSync(prjPath, "utf8"));
		if (typeof prj.projectId !== "string" && typeof prj.orgId !== "string") throw new TypeError("Expected a string-valued projectId property. Try running `vc link` to re-link your project.");
		return {
			projectId: prj.projectId,
			teamId: prj.orgId
		};
	}
	// Persist a project's OIDC token to <userDataDir>/com.vercel.token/<projectId>.json.
	// Permissions are decimal-encoded octal: 504 = 0o770 (dir), 432 = 0o660 (file),
	// restricting access to the owner/group.
	function saveToken(token, projectId) {
		const dir = (0, import_token_io.getUserDataDir)();
		if (!dir) throw new import_token_error.VercelOidcTokenError("Unable to find user data directory. Please reach out to Vercel support.");
		const tokenPath = path.join(dir, "com.vercel.token", `${projectId}.json`);
		const tokenJson = JSON.stringify(token);
		fs.mkdirSync(path.dirname(tokenPath), {
			mode: 504,
			recursive: true
		});
		fs.writeFileSync(tokenPath, tokenJson);
		fs.chmodSync(tokenPath, 432);
	}
	// Load a previously saved OIDC token for a project, or null when none
	// exists. Throws (via assert) if the file's contents are malformed.
	function loadToken(projectId) {
		const dir = (0, import_token_io.getUserDataDir)();
		if (!dir) throw new import_token_error.VercelOidcTokenError("Unable to find user data directory. Please reach out to Vercel support.");
		const tokenPath = path.join(dir, "com.vercel.token", `${projectId}.json`);
		if (!fs.existsSync(tokenPath)) return null;
		const token = JSON.parse(fs.readFileSync(tokenPath, "utf8"));
		assertVercelOidcTokenResponse(token);
		return token;
	}
	// Decode a JWT's payload (middle segment) without verifying the signature:
	// base64url -> base64 (translate -/_ and re-pad to a multiple of 4), then
	// JSON-parse. Throws VercelOidcTokenError when the token is not 3 segments.
	function getTokenPayload(token) {
		const tokenParts = token.split(".");
		if (tokenParts.length !== 3) throw new import_token_error.VercelOidcTokenError("Invalid token. Please run `vc env pull` and try again");
		const base64 = tokenParts[1].replace(/-/g, "+").replace(/_/g, "/");
		const padded = base64.padEnd(base64.length + (4 - base64.length % 4) % 4, "=");
		return JSON.parse(Buffer.from(padded, "base64").toString("utf8"));
	}
	// True when a decoded JWT payload's `exp` (Unix seconds) is in the past.
	function isExpired(token) {
		return token.exp * 1e3 < Date.now();
	}
}));
|
|
354
|
-
|
|
355
|
-
//#endregion
|
|
356
|
-
export { require_token_util as t };
|