nemar-cli 0.3.2-dev.37 → 0.3.2

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +2 -2
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -118,7 +118,7 @@ Approving user: ${D}
  Revoking access for: ${D}
  `)),console.log(z.yellow("This will:")),console.log(" 1. Invalidate all API keys for this user"),console.log(" 2. Remove them from datasets they have access to"),console.log(" 3. Send them a notification email"),console.log();let{confirm:$}=await uD.prompt([{type:"input",name:"confirm",message:`Type '${D}' to confirm revocation:`,validate:(J)=>{if(J!==D)return"Username does not match";return!0}}]),B=_(`Revoking ${D}...`).start();try{await Cv(D),B.succeed(`Revoked access for ${D}`)}catch(J){if(J instanceof YD){if(B.fail(J.message),J.statusCode===403)console.log(z.gray(" This command requires admin privileges"));else if(J.statusCode===404)console.log(z.gray(" User not found"))}else B.fail("Failed to revoke user")}});c8.command("regenerate-iam").description("Regenerate AWS IAM credentials for a user").argument("<username>","Username to regenerate credentials for").action(async(D)=>{if(!j6())return;console.log(z.yellow(`
  Regenerate IAM credentials for: ${D}
- `)),console.log("This will:"),console.log(" 1. Create new AWS IAM access keys for the user"),console.log(" 2. Invalidate any existing access keys"),console.log(" 3. Restore S3 access to their datasets"),console.log(),console.log(z.gray("Use this if a user's credentials were compromised or lost.")),console.log();let{confirm:F}=await uD.prompt([{type:"confirm",name:"confirm",message:`Regenerate IAM credentials for ${D}?`,default:!1}]);if(!F){console.log(z.gray("Cancelled"));return}let $=_(`Regenerating IAM credentials for ${D}...`).start();try{let B=await Rv(D);if($.succeed(`Regenerated IAM credentials for ${D}`),console.log(),console.log(` IAM Username: ${z.cyan(B.user.iam_username)}`),B.user.is_admin)console.log(` Admin: ${z.magenta("yes (full bucket access)")}`);if(console.log(` Datasets restored: ${z.green(B.datasets_restored)}`),B.warning)console.log(),console.log(z.yellow(` Warning: ${B.warning}`)),console.log(z.gray(" Please verify old credentials are revoked in AWS console."));console.log(),console.log(z.gray("The user can now upload to their datasets again."))}catch(B){if(B instanceof YD){if($.fail(B.message),B.statusCode===403)console.log(z.gray(" This command requires admin privileges"));else if(B.statusCode===404)console.log(z.gray(" User not found or not approved"))}else{$.fail("Failed to regenerate IAM credentials");let J=B instanceof Error?B.message:String(B);console.log(z.gray(` Error details: ${J}`))}}});var zE=new f0("doi").description("DOI management");zE.command("create").description("Create concept DOI for a dataset").argument("<dataset-id>","Dataset ID (e.g., nm000104)").option("--title <title>","DOI title (defaults to dataset name)").option("--description <desc>","DOI description").option("--sandbox","Use Zenodo sandbox for testing").action(async(D,F)=>{if(!j6())return;let $=_("Fetching dataset info...").start(),B;try{B=await WF(D),$.succeed(`Found dataset: ${B.name}`)}catch(Q){if(Q instanceof YD){if($.fail(Q.message),Q.statusCode===404)console.log(z.gray(" Dataset not found"))}else $.fail("Failed to fetch dataset");return}try{let Q=await WE(D);if(Q.concept_doi){if(console.log(z.yellow(`
+ `)),console.log("This will:"),console.log(" 1. Create new AWS IAM access keys for the user"),console.log(" 2. Invalidate any existing access keys"),console.log(" 3. Restore S3 access to their datasets"),console.log(),console.log(z.gray("Use this if a user's credentials were compromised or lost.")),console.log();let{confirm:F}=await uD.prompt([{type:"confirm",name:"confirm",message:`Regenerate IAM credentials for ${D}?`,default:!1}]);if(!F){console.log(z.gray("Cancelled"));return}let $=_(`Regenerating IAM credentials for ${D}...`).start();try{let B=await Rv(D);$.succeed(`Regenerated IAM credentials for ${D}`),console.log(),console.log(` IAM Username: ${z.cyan(B.user.iam_username)}`),console.log(` Datasets restored: ${z.green(B.datasets_restored)}`),console.log(),console.log(z.gray("The user can now upload to their datasets again."))}catch(B){if(B instanceof YD){if($.fail(B.message),B.statusCode===403)console.log(z.gray(" This command requires admin privileges"));else if(B.statusCode===404)console.log(z.gray(" User not found or not approved"))}else $.fail("Failed to regenerate IAM credentials")}});var zE=new f0("doi").description("DOI management");zE.command("create").description("Create concept DOI for a dataset").argument("<dataset-id>","Dataset ID (e.g., nm000104)").option("--title <title>","DOI title (defaults to dataset name)").option("--description <desc>","DOI description").option("--sandbox","Use Zenodo sandbox for testing").action(async(D,F)=>{if(!j6())return;let $=_("Fetching dataset info...").start(),B;try{B=await WF(D),$.succeed(`Found dataset: ${B.name}`)}catch(Q){if(Q instanceof YD){if($.fail(Q.message),Q.statusCode===404)console.log(z.gray(" Dataset not found"))}else $.fail("Failed to fetch dataset");return}try{let Q=await WE(D);if(Q.concept_doi){if(console.log(z.yellow(`
  Dataset already has a concept DOI:`)),console.log(` Concept DOI: ${z.cyan(Q.concept_doi)}`),Q.zenodo_concept_url)console.log(` Zenodo URL: ${Q.zenodo_concept_url}`);return}}catch{}if(console.log(),console.log(z.cyan("Dataset Information:")),console.log(` ID: ${B.dataset_id}`),console.log(` Name: ${B.name}`),B.github_repo)console.log(` GitHub: ${B.github_repo}`);if(F.sandbox)console.log(` Mode: ${z.yellow("SANDBOX (test DOI)")}`);console.log(),console.log(z.red("WARNING: DOIs are PERMANENT and cannot be deleted!")),console.log(z.gray("The DOI will be pre-reserved but not published until the first version release.")),console.log();let{confirm:J}=await uD.prompt([{type:"confirm",name:"confirm",message:F.sandbox?"Create test concept DOI on Zenodo sandbox?":"Create concept DOI on Zenodo?",default:!1}]);if(!J){console.log(z.gray("Cancelled"));return}let Y=_("Creating concept DOI on Zenodo...").start();try{let Q=await Ov(D,{title:F.title,description:F.description,sandbox:F.sandbox});if(Y.succeed("Concept DOI created successfully"),console.log(),console.log(z.green("DOI Information:")),console.log(` Concept DOI: ${z.cyan(Q.concept_doi)}`),console.log(` Zenodo URL: ${Q.zenodo_url}`),console.log(),console.log(z.yellow("Next steps:")),console.log(" 1. Set up automatic DOI publishing by running:"),console.log(z.gray(` ${Q.setup_command}`)),console.log(" (paste the webhook token when prompted)"),console.log(),console.log(" 2. Update dataset_description.json with DatasetDOI field"),console.log(" 3. Create a PR and merge it to trigger version DOI publication"),console.log(),F.sandbox)console.log(z.gray("Note: This is a sandbox DOI and will not resolve in production."))}catch(Q){if(Q instanceof YD){if(Y.fail(Q.message),Q.statusCode===403)console.log(z.gray(" This command requires admin privileges"))}else Y.fail("Failed to create concept DOI"),console.log(z.gray(` ${Q instanceof Error?Q.message:"Unknown error"}`))}});zE.command("info").description("Get DOI info for a dataset").argument("<dataset-id>","Dataset ID (e.g., nm000104)").action(async(D)=>{if(!j6())return;let F=_("Fetching DOI info...").start();try{let $=await WE(D);if(F.stop(),console.log(),console.log(z.cyan(`DOI Information for ${D}:`)),console.log(` Dataset Name: ${$.name}`),console.log(),$.concept_doi){if(console.log(z.green("Concept DOI:")),console.log(` DOI: ${$.concept_doi}`),console.log(` URL: https://doi.org/${$.concept_doi}`),$.zenodo_concept_url)console.log(` Zenodo: ${$.zenodo_concept_url}`)}else console.log(z.yellow("No concept DOI created yet")),console.log(z.gray(" Use 'nemar admin doi create' to create one"));if(console.log(),$.latest_version_doi){if(console.log(z.green("Latest Version DOI:")),console.log(` DOI: ${$.latest_version_doi}`),console.log(` URL: https://doi.org/${$.latest_version_doi}`),$.zenodo_latest_version_url)console.log(` Zenodo: ${$.zenodo_latest_version_url}`)}else if($.concept_doi)console.log(z.yellow("No version DOI published yet")),console.log(z.gray(" Version DOIs are created automatically on PR merge"))}catch($){if($ instanceof YD){if(F.fail($.message),$.statusCode===404)console.log(z.gray(" Dataset not found"));else if($.statusCode===403)console.log(z.gray(" This command requires admin privileges"))}else F.fail("Failed to fetch DOI info")}});c8.addCommand(zE);c8.command("revert").description("Revert a dataset to a previous version (creates PR for review)").argument("<dataset-id>","Dataset ID (e.g., nm000104)").argument("[version]","Target version to revert to (e.g., 1.0.0)").option("--list","List 
available versions without reverting").option("--force","Direct push to main without PR (emergency only)").option("--message <msg>","Custom revert commit message").option("--dir <path>","Use existing local clone instead of cloning fresh").action(async(D,F,$)=>{if(!j6())return;let B=await M$();if(!B.allPassed){console.log(z.red("Error: Missing prerequisites"));for(let v of B.errors)console.log(z.gray(` - ${v}`));return}let J,Y=!0;if($.dir){if(!fED($.dir)){console.log(z.red(`Error: Directory not found: ${$.dir}`));return}J=$.dir,Y=!1}else J=gED(process.cwd(),`${D}-revert-${Date.now()}`);let Q=_("Fetching dataset info...").start(),X;try{X=await WF(D),Q.succeed(`Found dataset: ${X.name}`)}catch(v){if(v instanceof YD){if(Q.fail(v.message),v.statusCode===404)console.log(z.gray(" Dataset not found"))}else Q.fail("Failed to fetch dataset");return}if(!X.github_repo){console.log(z.red("Error: Dataset has no GitHub repository"));return}if(Y){let v=_(`Cloning ${D}...`).start(),O=`https://github.com/${X.github_repo}.git`,QD=await N$(O,J);if(!QD.success){v.fail(`Clone failed: ${QD.error}`);return}v.succeed(`Cloned to ${J}`)}let E=await uv(J);if(E.length===0){console.log(z.yellow("No versions found for this dataset")),console.log(z.gray(" Dataset may not have any tagged releases yet"));return}if($.list){console.log(`
  ${z.cyan("Available Versions:")}
  `);for(let v of E)console.log(` ${z.green(v.version)} ${z.gray(v.date)} ${z.gray(v.commit)}`);return}let G=F;if(!G){console.log(`
@@ -332,7 +332,7 @@ Examples:
  $ nemar sandbox # Run sandbox training
  $ nemar sandbox status # Check if training is completed
  $ nemar sandbox reset # Reset for re-training
- `).action(sED);async function sED(){if(console.log(),console.log(z.bold("NEMAR Sandbox Training")),console.log(z.gray("Verify your setup and learn the upload workflow")),console.log(),!yD()){console.log(z.red("Not authenticated")),console.log(z.gray("Run 'nemar auth login' first"));return}let D=rD();if(D.sandboxCompleted){console.log(z.green("Sandbox training already completed!")),console.log(z.gray(`Dataset ID: ${D.sandboxDatasetId}`)),console.log(),console.log("You can upload real datasets with:"),console.log(z.cyan(" nemar dataset upload ./your-dataset")),console.log(),console.log(z.gray("To re-run training, use: nemar sandbox reset"));return}console.log(z.bold("Step 1/6: Checking prerequisites..."));let F=_("Checking DataLad, git-annex, and SSH...").start(),$=await G$();if(!$.allPassed){F.fail("Prerequisites check failed"),console.log(),console.log(z.red("Missing requirements:"));for(let U of $.errors)console.log(z.yellow(` - ${U}`));if(!$.githubSSH.accessible)console.log(z.gray(" Run 'nemar auth setup-ssh' to configure SSH"));return}F.succeed("All prerequisites met"),console.log(),console.log(z.bold("Step 2/6: Generating test dataset..."));let B=_("Creating minimal BIDS structure...").start(),J;try{let U=Dy();J=U.root;let M=Fy(U);B.succeed(`Test dataset created (${V$(M)})`),console.log(z.gray(` Location: ${J}`))}catch(U){B.fail("Failed to generate test dataset"),console.log(z.red(` ${U instanceof Error?U.message:"Unknown error"}`));return}console.log(),console.log(z.bold("Step 3/6: Registering sandbox dataset..."));let Y=_("Creating dataset on NEMAR...").start(),Q,X,E,G,H;try{let U=await X$({name:"Sandbox Training Dataset",description:"Placeholder dataset for sandbox training",files:[{path:"sub-01/eeg/sub-01_task-rest_eeg.edf",size:512000,type:"data"},{path:"dataset_description.json",size:200,type:"metadata"},{path:"participants.tsv",size:50,type:"metadata"},{path:"README",size:500,type:"metadata"},{path:"sub-01/eeg/sub-01_task-rest_eeg.json",size:300,type:"metadata"}],sandbox:!0});Q=U.dataset.dataset_id,X=U.dataset.ssh_url,E=U.s3_config,G=U.dataset.s3_prefix,H=U.upload_urls||{},Y.succeed(`Sandbox dataset created: ${z.cyan(Q)}`),console.log(z.gray(` GitHub: ${U.dataset.github_url}`)),await new Promise((M)=>setTimeout(M,3000))}catch(U){if(Y.fail("Failed to create sandbox dataset"),U instanceof YD)console.log(z.red(` ${U.message}`));else console.log(z.red(` ${U instanceof Error?U.message:"Unknown error"}`));k1(J);return}console.log(),console.log(z.bold("Step 4/6: Initializing repository..."));let q=_("Setting up DataLad and git-annex...").start();try{await H$(J),await W$(J),await q$(J,X),q.succeed("Repository initialized")}catch(U){q.fail("Failed to initialize repository"),console.log(z.red(` ${U instanceof Error?U.message:"Unknown error"}`)),k1(J);return}if(console.log(),console.log(z.bold("Step 5/6: Uploading to S3...")),Object.keys(H).length===0)console.log(z.yellow(" No data files to upload (metadata only)"));else{let U=_("Uploading test data...").start();try{let N=0,Z=Object.keys(H).length,C=await A$(J,H,{jobs:4,onProgress:(w)=>{if(w.status==="completed"||w.status==="failed")N++,U.text=`Uploading... 
${N}/${Z} files`}});if(C.failed.length>0){U.fail(`Upload failed for ${C.failed.length} file(s)`);for(let w of C.failed)console.log(z.red(` Failed: ${w}`));if(C.error)console.log(z.red(` Error: ${C.error}`));console.log(),console.log(z.yellow("Sandbox training aborted due to upload failures.")),console.log(z.gray("Please check your network connection and try again.")),k1(J);return}U.succeed(`Uploaded ${C.uploaded} file(s)`)}catch(N){U.fail("Upload failed"),console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`)),k1(J);return}let M=_("Registering file URLs...").start();try{let N={};for(let C of Object.keys(H))N[C]=`${E.public_url}/${G}/${C}`;let Z=await U$(J,N);if(!Z.success){M.fail(`URL registration failed for ${Z.failed.length} file(s)`);for(let C of Z.failed)console.log(z.red(` Failed: ${C}`));console.log(),console.log(z.yellow("Sandbox training aborted due to URL registration failures.")),console.log(z.gray("This may indicate a git-annex configuration issue.")),k1(J);return}M.succeed(`Registered ${Z.registered} file URLs`)}catch(N){M.fail("Failed to register URLs"),console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`)),k1(J);return}}console.log(),console.log(z.bold("Step 6/6: Pushing to GitHub..."));let V=_("Saving and pushing...").start();try{await K$(J,"Initial sandbox training upload"),await z$(J),V.succeed("Pushed to GitHub")}catch(U){V.fail("Failed to push to GitHub"),console.log(z.red(` ${U instanceof Error?U.message:"Unknown error"}`)),k1(J);return}let A=_("Finalizing...").start();try{await E$(Q),await Pv(Q),pD("sandboxCompleted",!0),pD("sandboxDatasetId",Q),A.succeed("Sandbox training complete!")}catch(U){A.fail("Failed to finalize"),console.log(z.red(` ${U instanceof Error?U.message:"Unknown error"}`)),k1(J);return}k1(J),console.log(),console.log(z.green.bold("Congratulations! 
Sandbox training completed successfully.")),console.log(),console.log("Your setup is verified and you're ready to upload real datasets:"),console.log(z.cyan(" nemar dataset upload ./your-dataset")),console.log(),console.log(z.gray(`Sandbox dataset: ${Q}`))}O$.command("status").description("Check sandbox training completion status").option("--refresh","Fetch latest status from server").action(async(D)=>{if(!yD()){console.log(z.red("Not authenticated")),console.log(z.gray("Run 'nemar auth login' first"));return}if(D.refresh){let F=_("Checking status...").start();try{let $=await kv();if(pD("sandboxCompleted",$.sandbox_completed),$.sandbox_dataset_id)pD("sandboxDatasetId",$.sandbox_dataset_id);if(F.stop(),$.sandbox_completed){if(console.log(z.green("Sandbox training: Completed")),console.log(z.gray(` Dataset ID: ${$.sandbox_dataset_id}`)),$.sandbox_completed_at)console.log(z.gray(` Completed: ${$.sandbox_completed_at}`))}else console.log(z.yellow("Sandbox training: Not completed")),console.log(),console.log("Run sandbox training with:"),console.log(z.cyan(" nemar sandbox"))}catch($){if(F.fail("Failed to check status"),$ instanceof YD)console.log(z.red(` ${$.message}`))}}else{let F=rD();if(F.sandboxCompleted)console.log(z.green("Sandbox training: Completed")),console.log(z.gray(` Dataset ID: ${F.sandboxDatasetId}`));else console.log(z.yellow("Sandbox training: Not completed")),console.log(),console.log("Run sandbox training with:"),console.log(z.cyan(" nemar sandbox"))}});O$.command("reset").description("Reset sandbox training status for re-training").option("-f, --force","Skip confirmation prompt").action(async(D)=>{if(!yD()){console.log(z.red("Not authenticated")),console.log(z.gray("Run 'nemar auth login' first"));return}if(!rD().sandboxCompleted){console.log(z.yellow("Sandbox training not yet completed")),console.log(z.gray("Nothing to reset"));return}if(!D.force){let B=(await Promise.resolve().then(() => (q3(),FI))).default,{confirm:J}=await B.prompt([{type:"confirm",name:"confirm",message:"Reset sandbox training status? 
You will need to complete training again.",default:!1}]);if(!J){console.log(z.gray("Cancelled"));return}}let $=_("Resetting sandbox status...").start();try{await Sv(),pD("sandboxCompleted",!1),pD("sandboxDatasetId",void 0),$.succeed("Sandbox status reset"),console.log(),console.log("Run sandbox training again with:"),console.log(z.cyan(" nemar sandbox"))}catch(B){if($.fail("Failed to reset"),B instanceof YD)console.log(z.red(` ${B.message}`));else console.log(z.red(` ${B instanceof Error?B.message:"Unknown error"}`))}});var $y={name:"nemar-cli",version:"0.3.2-dev.37",description:"CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource) dataset management",type:"module",main:"dist/index.js",bin:{nemar:"dist/index.js"},scripts:{dev:"bun run src/index.ts",build:"bun build src/index.ts --outdir dist --target bun --minify && sed '1s|#!/usr/bin/env node|#!/usr/bin/env bun|' dist/index.js > dist/index.js.tmp && mv dist/index.js.tmp dist/index.js",test:"bun test",lint:"biome check src/","lint:fix":"biome check --fix src/",format:"biome format --write src/",typecheck:"tsc --noEmit",prepublishOnly:"bun run build","docs:generate":"bun run scripts/generate-docs.ts","docs:serve":"mkdocs serve","docs:build":"mkdocs build"},keywords:["nemar","bids","neuroimaging","eeg","emg","datalad","cli"],author:"NEMAR Team",license:"MIT",repository:{type:"git",url:"git+https://github.com/nemarDatasets/nemar-cli.git"},bugs:{url:"https://github.com/nemarDatasets/nemar-cli/issues"},homepage:"https://nemar-cli.pages.dev",engines:{bun:">=1.0.0"},files:["dist","README.md","LICENSE"],dependencies:{chalk:"^5.3.0",commander:"^12.1.0",conf:"^13.0.1",inquirer:"^9.2.15",ora:"^8.0.1",zod:"^3.23.8"},devDependencies:{"@biomejs/biome":"^1.9.4","@types/bcryptjs":"^3.0.0","@types/bun":"latest","@types/inquirer":"^9.0.7",bcryptjs:"^3.0.3",typescript:"^5.5.4"}};var By=$y.version;var Q1=new f0;Q1.name("nemar").description(`CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource)
+ `).action(sED);async function sED(){if(console.log(),console.log(z.bold("NEMAR Sandbox Training")),console.log(z.gray("Verify your setup and learn the upload workflow")),console.log(),!yD()){console.log(z.red("Not authenticated")),console.log(z.gray("Run 'nemar auth login' first"));return}let D=rD();if(D.sandboxCompleted){console.log(z.green("Sandbox training already completed!")),console.log(z.gray(`Dataset ID: ${D.sandboxDatasetId}`)),console.log(),console.log("You can upload real datasets with:"),console.log(z.cyan(" nemar dataset upload ./your-dataset")),console.log(),console.log(z.gray("To re-run training, use: nemar sandbox reset"));return}console.log(z.bold("Step 1/6: Checking prerequisites..."));let F=_("Checking DataLad, git-annex, and SSH...").start(),$=await G$();if(!$.allPassed){F.fail("Prerequisites check failed"),console.log(),console.log(z.red("Missing requirements:"));for(let U of $.errors)console.log(z.yellow(` - ${U}`));if(!$.githubSSH.accessible)console.log(z.gray(" Run 'nemar auth setup-ssh' to configure SSH"));return}F.succeed("All prerequisites met"),console.log(),console.log(z.bold("Step 2/6: Generating test dataset..."));let B=_("Creating minimal BIDS structure...").start(),J;try{let U=Dy();J=U.root;let M=Fy(U);B.succeed(`Test dataset created (${V$(M)})`),console.log(z.gray(` Location: ${J}`))}catch(U){B.fail("Failed to generate test dataset"),console.log(z.red(` ${U instanceof Error?U.message:"Unknown error"}`));return}console.log(),console.log(z.bold("Step 3/6: Registering sandbox dataset..."));let Y=_("Creating dataset on NEMAR...").start(),Q,X,E,G,H;try{let U=await X$({name:"Sandbox Training Dataset",description:"Placeholder dataset for sandbox training",files:[{path:"sub-01/eeg/sub-01_task-rest_eeg.edf",size:512000,type:"data"},{path:"dataset_description.json",size:200,type:"metadata"},{path:"participants.tsv",size:50,type:"metadata"},{path:"README",size:500,type:"metadata"},{path:"sub-01/eeg/sub-01_task-rest_eeg.json",size:300,type:"metadata"}],sandbox:!0});Q=U.dataset.dataset_id,X=U.dataset.ssh_url,E=U.s3_config,G=U.dataset.s3_prefix,H=U.upload_urls||{},Y.succeed(`Sandbox dataset created: ${z.cyan(Q)}`),console.log(z.gray(` GitHub: ${U.dataset.github_url}`)),await new Promise((M)=>setTimeout(M,3000))}catch(U){if(Y.fail("Failed to create sandbox dataset"),U instanceof YD)console.log(z.red(` ${U.message}`));else console.log(z.red(` ${U instanceof Error?U.message:"Unknown error"}`));k1(J);return}console.log(),console.log(z.bold("Step 4/6: Initializing repository..."));let q=_("Setting up DataLad and git-annex...").start();try{await H$(J),await W$(J),await q$(J,X),q.succeed("Repository initialized")}catch(U){q.fail("Failed to initialize repository"),console.log(z.red(` ${U instanceof Error?U.message:"Unknown error"}`)),k1(J);return}if(console.log(),console.log(z.bold("Step 5/6: Uploading to S3...")),Object.keys(H).length===0)console.log(z.yellow(" No data files to upload (metadata only)"));else{let U=_("Uploading test data...").start();try{let N=0,Z=Object.keys(H).length,C=await A$(J,H,{jobs:4,onProgress:(w)=>{if(w.status==="completed"||w.status==="failed")N++,U.text=`Uploading... 
${N}/${Z} files`}});if(C.failed.length>0){U.fail(`Upload failed for ${C.failed.length} file(s)`);for(let w of C.failed)console.log(z.red(` Failed: ${w}`));if(C.error)console.log(z.red(` Error: ${C.error}`));console.log(),console.log(z.yellow("Sandbox training aborted due to upload failures.")),console.log(z.gray("Please check your network connection and try again.")),k1(J);return}U.succeed(`Uploaded ${C.uploaded} file(s)`)}catch(N){U.fail("Upload failed"),console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`)),k1(J);return}let M=_("Registering file URLs...").start();try{let N={};for(let C of Object.keys(H))N[C]=`${E.public_url}/${G}/${C}`;let Z=await U$(J,N);if(!Z.success){M.fail(`URL registration failed for ${Z.failed.length} file(s)`);for(let C of Z.failed)console.log(z.red(` Failed: ${C}`));console.log(),console.log(z.yellow("Sandbox training aborted due to URL registration failures.")),console.log(z.gray("This may indicate a git-annex configuration issue.")),k1(J);return}M.succeed(`Registered ${Z.registered} file URLs`)}catch(N){M.fail("Failed to register URLs"),console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`)),k1(J);return}}console.log(),console.log(z.bold("Step 6/6: Pushing to GitHub..."));let V=_("Saving and pushing...").start();try{await K$(J,"Initial sandbox training upload"),await z$(J),V.succeed("Pushed to GitHub")}catch(U){V.fail("Failed to push to GitHub"),console.log(z.red(` ${U instanceof Error?U.message:"Unknown error"}`)),k1(J);return}let A=_("Finalizing...").start();try{await E$(Q),await Pv(Q),pD("sandboxCompleted",!0),pD("sandboxDatasetId",Q),A.succeed("Sandbox training complete!")}catch(U){A.fail("Failed to finalize"),console.log(z.red(` ${U instanceof Error?U.message:"Unknown error"}`)),k1(J);return}k1(J),console.log(),console.log(z.green.bold("Congratulations! 
Sandbox training completed successfully.")),console.log(),console.log("Your setup is verified and you're ready to upload real datasets:"),console.log(z.cyan(" nemar dataset upload ./your-dataset")),console.log(),console.log(z.gray(`Sandbox dataset: ${Q}`))}O$.command("status").description("Check sandbox training completion status").option("--refresh","Fetch latest status from server").action(async(D)=>{if(!yD()){console.log(z.red("Not authenticated")),console.log(z.gray("Run 'nemar auth login' first"));return}if(D.refresh){let F=_("Checking status...").start();try{let $=await kv();if(pD("sandboxCompleted",$.sandbox_completed),$.sandbox_dataset_id)pD("sandboxDatasetId",$.sandbox_dataset_id);if(F.stop(),$.sandbox_completed){if(console.log(z.green("Sandbox training: Completed")),console.log(z.gray(` Dataset ID: ${$.sandbox_dataset_id}`)),$.sandbox_completed_at)console.log(z.gray(` Completed: ${$.sandbox_completed_at}`))}else console.log(z.yellow("Sandbox training: Not completed")),console.log(),console.log("Run sandbox training with:"),console.log(z.cyan(" nemar sandbox"))}catch($){if(F.fail("Failed to check status"),$ instanceof YD)console.log(z.red(` ${$.message}`))}}else{let F=rD();if(F.sandboxCompleted)console.log(z.green("Sandbox training: Completed")),console.log(z.gray(` Dataset ID: ${F.sandboxDatasetId}`));else console.log(z.yellow("Sandbox training: Not completed")),console.log(),console.log("Run sandbox training with:"),console.log(z.cyan(" nemar sandbox"))}});O$.command("reset").description("Reset sandbox training status for re-training").option("-f, --force","Skip confirmation prompt").action(async(D)=>{if(!yD()){console.log(z.red("Not authenticated")),console.log(z.gray("Run 'nemar auth login' first"));return}if(!rD().sandboxCompleted){console.log(z.yellow("Sandbox training not yet completed")),console.log(z.gray("Nothing to reset"));return}if(!D.force){let B=(await Promise.resolve().then(() => (q3(),FI))).default,{confirm:J}=await B.prompt([{type:"confirm",name:"confirm",message:"Reset sandbox training status? 
You will need to complete training again.",default:!1}]);if(!J){console.log(z.gray("Cancelled"));return}}let $=_("Resetting sandbox status...").start();try{await Sv(),pD("sandboxCompleted",!1),pD("sandboxDatasetId",void 0),$.succeed("Sandbox status reset"),console.log(),console.log("Run sandbox training again with:"),console.log(z.cyan(" nemar sandbox"))}catch(B){if($.fail("Failed to reset"),B instanceof YD)console.log(z.red(` ${B.message}`));else console.log(z.red(` ${B instanceof Error?B.message:"Unknown error"}`))}});var $y={name:"nemar-cli",version:"0.3.2",description:"CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource) dataset management",type:"module",main:"dist/index.js",bin:{nemar:"dist/index.js"},scripts:{dev:"bun run src/index.ts",build:"bun build src/index.ts --outdir dist --target bun --minify && sed '1s|#!/usr/bin/env node|#!/usr/bin/env bun|' dist/index.js > dist/index.js.tmp && mv dist/index.js.tmp dist/index.js",test:"bun test",lint:"biome check src/","lint:fix":"biome check --fix src/",format:"biome format --write src/",typecheck:"tsc --noEmit",prepublishOnly:"bun run build","docs:generate":"bun run scripts/generate-docs.ts","docs:serve":"mkdocs serve","docs:build":"mkdocs build"},keywords:["nemar","bids","neuroimaging","eeg","emg","datalad","cli"],author:"NEMAR Team",license:"MIT",repository:{type:"git",url:"git+https://github.com/nemarDatasets/nemar-cli.git"},bugs:{url:"https://github.com/nemarDatasets/nemar-cli/issues"},homepage:"https://nemar-cli.pages.dev",engines:{bun:">=1.0.0"},files:["dist","README.md","LICENSE"],dependencies:{chalk:"^5.3.0",commander:"^12.1.0",conf:"^13.0.1",inquirer:"^9.2.15",ora:"^8.0.1",zod:"^3.23.8"},devDependencies:{"@biomejs/biome":"^1.9.4","@types/bcryptjs":"^3.0.0","@types/bun":"latest","@types/inquirer":"^9.0.7",bcryptjs:"^3.0.3",typescript:"^5.5.4"}};var By=$y.version;var Q1=new f0;Q1.name("nemar").description(`CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource)
 
  NEMAR is a curated repository for neurophysiology data in BIDS format.
  This CLI provides tools for uploading, downloading, and managing datasets.`).version(By,"-v, --version","Output the current version").option("--no-color","Disable colored output").option("--verbose","Enable verbose output").addHelpText("after",`
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "nemar-cli",
- "version": "0.3.2-dev.37",
+ "version": "0.3.2",
  "description": "CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource) dataset management",
  "type": "module",
  "main": "dist/index.js",