nemar-cli 0.3.3-dev.44 → 0.3.3-dev.47

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +3 -3
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -111,7 +111,7 @@ Fix one of these issues:
  2. Install and authenticate gh CLI (automatic fallback):
  gh auth login
 
- Choose option 2 for quick setup, or option 1 if you have multiple GitHub accounts.`}}}}try{let{stdout:Y}=await QD(["git","remote","get-url",B],{cwd:D});if(Y.trim()){let{stderr:Q,exitCode:X}=await QD(["git","remote","set-url",B,J],{cwd:D});if(X!==0)return{success:!1,error:Q.trim()}}else{let{stderr:Q,exitCode:X}=await QD(["git","remote","add",B,J],{cwd:D});if(X!==0)return{success:!1,error:Q.trim()}}return{success:!0}}catch(Y){return{success:!1,error:Y.message}}}async function K$(D,F){try{let{stderr:$,exitCode:B}=await QD(["datalad","save","-m",F],{cwd:D});if(B!==0){if($.includes("nothing to save")||$.includes("no changes"))return{success:!0};return{success:!1,error:$.trim()||"Failed to save dataset"}}return{success:!0}}catch($){return{success:!1,error:$.message}}}async function z$(D,F="origin",$="main"){try{let{stderr:B,exitCode:J}=await QD(["git","push","-u",F,$],{cwd:D});if(J!==0)return{success:!1,error:B.trim()||"Failed to push to GitHub"};let{stderr:Y,exitCode:Q}=await QD(["git","push",F,"git-annex"],{cwd:D});if(Q!==0)console.warn("Warning: Could not push git-annex branch:",Y.trim());return{success:!0}}catch(B){return{success:!1,error:B.message}}}function V$(D){if(D===0)return"0 B";let F=1024,$=["B","KB","MB","GB","TB"],B=Math.floor(Math.log(D)/Math.log(F));return`${Number.parseFloat((D/F**B).toFixed(2))} ${$[B]}`}async function _ED(D,F,$){try{let B=await Bun.file(D).arrayBuffer(),J=B.byteLength,Y=await fetch(F,{method:"PUT",body:B,headers:{"Content-Length":J.toString()}});if(!Y.ok){let Q=await Y.text();return{success:!1,error:`Upload failed: ${Y.status} ${Q}`}}return $?.(J,J),{success:!0}}catch(B){return{success:!1,error:B.message}}}async function A$(D,F,$={}){let B=$.jobs||4,J=Object.entries(F),Y=[],Q=0;for(let X=0;X<J.length;X+=B){let E=J.slice(X,X+B),G=await Promise.all(E.map(async([W,q])=>{let V=qE(D,W);$.onProgress?.({file:W,uploaded:0,total:0,status:"uploading"});let A=await _ED(V,q);if(A.success)Q++,$.onProgress?.({file:W,uploaded:1,total:1,status:"completed"});else Y.push(`${W}: ${A.error||"Unknown error"}`),$.onProgress?.({file:W,uploaded:0,total:1,status:"failed",error:A.error});return{path:W,...A}}))}return{success:Y.length===0,uploaded:Q,failed:Y,error:Y.length>0?`${Y.length} files failed to upload`:void 0}}async function fED(D,F,$){try{if((await QD(["git","annex","add",F],{cwd:D})).exitCode!==0);let J=await QD(["git","annex","lookupkey",F],{cwd:D});if(J.exitCode!==0||!J.stdout.trim())return{success:!1,error:`Could not get git-annex key for ${F}`};let Y=J.stdout.trim(),Q=await QD(["git","annex","registerurl",Y,$],{cwd:D});if(Q.exitCode!==0)return{success:!1,error:`Failed to register URL: ${Q.stderr}`};return{success:!0}}catch(B){return{success:!1,error:B.message}}}async function N$(D,F,$){let B=0,J=[];for(let[Y,Q]of Object.entries(F))if((await fED(D,Y,Q)).success)B++,$?.(Y,!0);else J.push(Y),$?.(Y,!1);return{success:J.length===0,registered:B,failed:J}}async function Z$(){let[D,F]=await Promise.all([fv(),gv()]),$=[];if(!D.installed)$.push("DataLad is not installed. Install: pip install datalad");else if(D.compatible===!1)$.push(`DataLad version ${D.version} is too old. Required: >= ${D.minVersion}`);if(!F.installed)$.push("git-annex is not installed. Install: brew install git-annex (macOS) or apt install git-annex (Linux)");else if(F.compatible===!1)$.push(`git-annex version ${F.version} is too old. Required: >= ${F.minVersion}`);return{datalad:D,gitAnnex:F,allPassed:$.length===0,errors:$}}async function U$(D,F){try{let{stderr:$,exitCode:B}=await QD(["datalad","clone",D,F]);if(B!==0)return{success:!1,error:$.trim()||"Failed to clone dataset"};return{success:!0}}catch($){return{success:!1,error:$.message}}}async function bv(D,F={}){let $=F.jobs||4,B=F.paths&&F.paths.length>0?F.paths:["."];try{let J=["datalad","get","-J",$.toString(),...B],{stdout:Y,stderr:Q,exitCode:X}=await QD(J,{cwd:D});if(X!==0)return{success:!1,error:Q.trim()||"Failed to get dataset data"};let E=Y.match(/get\(ok\):/g);return{success:!0,filesDownloaded:E?E.length:0}}catch(J){return{success:!1,error:J.message}}}async function uv(D){if(!xv(D))return null;try{let{stdout:F,exitCode:$}=await QD(["git","annex","info","--json"],{cwd:D});if($===0){let B=JSON.parse(F),J=0,Y=B["local annex size"]||"0 bytes",Q=Y.match(/([\d.]+)\s*(bytes?|KB|MB|GB|TB)/i);if(Q){let G=Number.parseFloat(Q[1]),W=Q[2].toLowerCase();J=G*({byte:1,bytes:1,kb:1024,mb:1048576,gb:1073741824,tb:1099511627776}[W]||1)}let X=B["annexed files in working tree"]||0,E=B["local annex keys"]||0;return{files:X,size:Y,sizeBytes:J,annexedFiles:X,presentFiles:E,missingFiles:X-E}}}catch{}try{let{stdout:F}=await QD(["find",".","-type","f","-not","-path","./.git/*"],{cwd:D}),$=F.trim().split(`
+ Choose option 2 for quick setup, or option 1 if you have multiple GitHub accounts.`}}}}try{let{stdout:Y}=await QD(["git","remote","get-url",B],{cwd:D});if(Y.trim()){let{stderr:Q,exitCode:X}=await QD(["git","remote","set-url",B,J],{cwd:D});if(X!==0)return{success:!1,error:Q.trim()}}else{let{stderr:Q,exitCode:X}=await QD(["git","remote","add",B,J],{cwd:D});if(X!==0)return{success:!1,error:Q.trim()}}return{success:!0}}catch(Y){return{success:!1,error:Y.message}}}async function K$(D,F){try{let{stderr:$,exitCode:B}=await QD(["datalad","save","-m",F],{cwd:D});if(B!==0){if($.includes("nothing to save")||$.includes("no changes"))return{success:!0};return{success:!1,error:$.trim()||"Failed to save dataset"}}return{success:!0}}catch($){return{success:!1,error:$.message}}}async function z$(D,F="origin",$="main"){try{let{stderr:B,exitCode:J}=await QD(["git","push","-u",F,$],{cwd:D});if(J!==0)return{success:!1,error:B.trim()||"Failed to push to GitHub"};let{stderr:Y,exitCode:Q}=await QD(["git","push",F,"git-annex"],{cwd:D});if(Q!==0)console.warn("Warning: Could not push git-annex branch:",Y.trim());return{success:!0}}catch(B){return{success:!1,error:B.message}}}function V$(D){if(D===0)return"0 B";let F=1024,$=["B","KB","MB","GB","TB"],B=Math.floor(Math.log(D)/Math.log(F));return`${Number.parseFloat((D/F**B).toFixed(2))} ${$[B]}`}async function _ED(D,F,$,B){let J=B?.maxRetries??4,Y=B?.initialDelayMs??1e4;try{let Q=await Bun.file(D).arrayBuffer(),X=Q.byteLength,E="";for(let G=0;G<=J;G++){let W=await fetch(F,{method:"PUT",body:Q,headers:{"Content-Length":X.toString()}});if(W.ok)return $?.(X,X),{success:!0};let q=await W.text();if(E=`Upload failed: ${W.status} ${q}`,!(W.status===403&&q.includes("AccessDenied"))||G===J)return{success:!1,error:E};let A=Y+G*5000;await new Promise((N)=>setTimeout(N,A))}return{success:!1,error:E}}catch(Q){return{success:!1,error:Q.message}}}async function A$(D,F,$={}){let B=$.jobs||4,J=Object.entries(F),Y=[],Q=0;for(let X=0;X<J.length;X+=B){let E=J.slice(X,X+B),G=await Promise.all(E.map(async([W,q])=>{let V=qE(D,W);$.onProgress?.({file:W,uploaded:0,total:0,status:"uploading"});let A=await _ED(V,q);if(A.success)Q++,$.onProgress?.({file:W,uploaded:1,total:1,status:"completed"});else Y.push(`${W}: ${A.error||"Unknown error"}`),$.onProgress?.({file:W,uploaded:0,total:1,status:"failed",error:A.error});return{path:W,...A}}))}return{success:Y.length===0,uploaded:Q,failed:Y,error:Y.length>0?`${Y.length} files failed to upload`:void 0}}async function fED(D,F,$){try{if((await QD(["git","annex","add",F],{cwd:D})).exitCode!==0);let J=await QD(["git","annex","lookupkey",F],{cwd:D});if(J.exitCode!==0||!J.stdout.trim())return{success:!1,error:`Could not get git-annex key for ${F}`};let Y=J.stdout.trim(),Q=await QD(["git","annex","registerurl",Y,$],{cwd:D});if(Q.exitCode!==0)return{success:!1,error:`Failed to register URL: ${Q.stderr}`};return{success:!0}}catch(B){return{success:!1,error:B.message}}}async function N$(D,F,$){let B=0,J=[];for(let[Y,Q]of Object.entries(F))if((await fED(D,Y,Q)).success)B++,$?.(Y,!0);else J.push(Y),$?.(Y,!1);return{success:J.length===0,registered:B,failed:J}}async function Z$(){let[D,F]=await Promise.all([fv(),gv()]),$=[];if(!D.installed)$.push("DataLad is not installed. Install: pip install datalad");else if(D.compatible===!1)$.push(`DataLad version ${D.version} is too old. Required: >= ${D.minVersion}`);if(!F.installed)$.push("git-annex is not installed. Install: brew install git-annex (macOS) or apt install git-annex (Linux)");else if(F.compatible===!1)$.push(`git-annex version ${F.version} is too old. Required: >= ${F.minVersion}`);return{datalad:D,gitAnnex:F,allPassed:$.length===0,errors:$}}async function U$(D,F){try{let{stderr:$,exitCode:B}=await QD(["datalad","clone",D,F]);if(B!==0)return{success:!1,error:$.trim()||"Failed to clone dataset"};return{success:!0}}catch($){return{success:!1,error:$.message}}}async function bv(D,F={}){let $=F.jobs||4,B=F.paths&&F.paths.length>0?F.paths:["."];try{let J=["datalad","get","-J",$.toString(),...B],{stdout:Y,stderr:Q,exitCode:X}=await QD(J,{cwd:D});if(X!==0)return{success:!1,error:Q.trim()||"Failed to get dataset data"};let E=Y.match(/get\(ok\):/g);return{success:!0,filesDownloaded:E?E.length:0}}catch(J){return{success:!1,error:J.message}}}async function uv(D){if(!xv(D))return null;try{let{stdout:F,exitCode:$}=await QD(["git","annex","info","--json"],{cwd:D});if($===0){let B=JSON.parse(F),J=0,Y=B["local annex size"]||"0 bytes",Q=Y.match(/([\d.]+)\s*(bytes?|KB|MB|GB|TB)/i);if(Q){let G=Number.parseFloat(Q[1]),W=Q[2].toLowerCase();J=G*({byte:1,bytes:1,kb:1024,mb:1048576,gb:1073741824,tb:1099511627776}[W]||1)}let X=B["annexed files in working tree"]||0,E=B["local annex keys"]||0;return{files:X,size:Y,sizeBytes:J,annexedFiles:X,presentFiles:E,missingFiles:X-E}}}catch{}try{let{stdout:F}=await QD(["find",".","-type","f","-not","-path","./.git/*"],{cwd:D}),$=F.trim().split(`
  `).filter(Boolean).length;return{files:$,size:"unknown",sizeBytes:0,annexedFiles:0,presentFiles:$,missingFiles:0}}catch{return null}}async function mv(D){try{let{stdout:F,exitCode:$}=await QD(["git","tag","-l","--sort=-version:refname","--format=%(refname:short)|%(creatordate:short)|%(objectname:short)"],{cwd:D});if($!==0||!F.trim())return[];return F.trim().split(`
  `).map((B)=>{let[J,Y,Q]=B.split("|");return{version:J,date:Y,commit:Q}})}catch{return[]}}async function lv(D,F){try{let $=F.startsWith("v")?F:`v${F}`,{stdout:B,exitCode:J}=await QD(["git","rev-parse",$],{cwd:D});if(J!==0)return null;return B.trim()}catch{return null}}async function dv(D,F,$){try{let{stderr:B,exitCode:J}=await QD(["git","checkout","-b",$],{cwd:D});if(J!==0)return{success:!1,error:B.trim()||"Failed to create branch"};let Y=F.startsWith("v")?F:`v${F}`,{stderr:Q,exitCode:X}=await QD(["git","checkout",Y,"--","."],{cwd:D});if(X!==0)return{success:!1,error:Q.trim()||"Failed to checkout files from target version"};return{success:!0}}catch(B){return{success:!1,error:B.message}}}async function pv(D,F,$){let B=$||`Revert to ${F}`;try{let{exitCode:J}=await QD(["git","add","-A"],{cwd:D});if(J!==0)return{success:!1,error:"Failed to stage changes"};let{stdout:Y}=await QD(["git","status","--porcelain"],{cwd:D});if(!Y.trim())return{success:!1,error:"No changes to revert (already at target version)"};let{stderr:Q,exitCode:X}=await QD(["git","commit","-m",B],{cwd:D});if(X!==0)return{success:!1,error:Q.trim()||"Failed to commit"};return{success:!0}}catch(J){return{success:!1,error:J.message}}}async function cv(D,F,$="origin"){try{let{stderr:B,exitCode:J}=await QD(["git","push","-u",$,F],{cwd:D});if(J!==0)return{success:!1,error:B.trim()||"Failed to push branch"};return{success:!0}}catch(B){return{success:!1,error:B.message}}}var gED=102400,hED=new Set([".edf",".bdf",".eeg",".vhdr",".vmrk",".set",".fdt",".cnt",".mff",".fif",".nii",".nii.gz",".mat",".bin"]);async function nv(D){let F=[],$=0,B=0,J=0,{stdout:Y,exitCode:Q}=await QD(["find",".","-type","f","-not","-path","./.git/*","-not","-path","./.datalad/*","-not","-name",".gitattributes"],{cwd:D});if(Q!==0)return{files:F,totalSize:$,dataFiles:B,metadataFiles:J};let X=Y.trim().split(`
  `).filter(Boolean);for(let E of X){let G=E.startsWith("./")?E.slice(2):E,W=qE(D,G);try{let V=SED(W).size;$+=V;let A=G.toLowerCase().match(/\.[^.]+$/)?.[0]||"",N=hED.has(A)||V>gED,Z=N?"data":"metadata";if(N)B++;else J++;F.push({path:G,size:V,type:Z})}catch{}}return{files:F,totalSize:$,dataFiles:B,metadataFiles:J}}var n8=new g0("admin").description("Admin commands (requires admin privileges)").addHelpText("after",`
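
The functional change in this hunk is confined to the minified upload helper (_ED): where the previous build made a single PUT to each presigned S3 URL, the new build accepts an options argument and retries 403 AccessDenied responses with a linear backoff (up to 4 retries, starting at 10 s and growing by 5 s per attempt). Below is a readable sketch of that behavior, assuming the Bun.file and fetch APIs the bundle itself uses; uploadWithRetry and the option field names mirror the minified defaults but are otherwise illustrative.

// Sketch only: deminified reading of the retry loop added to _ED.
interface RetryOptions {
  maxRetries?: number;     // bundle default: 4, so up to 5 attempts total
  initialDelayMs?: number; // bundle default: 10_000
}

async function uploadWithRetry(
  filePath: string,
  presignedUrl: string,
  onProgress?: (sent: number, total: number) => void,
  opts: RetryOptions = {},
): Promise<{ success: boolean; error?: string }> {
  const maxRetries = opts.maxRetries ?? 4;
  const initialDelayMs = opts.initialDelayMs ?? 10_000;
  const body = await Bun.file(filePath).arrayBuffer();
  let lastError = "";
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    const res = await fetch(presignedUrl, {
      method: "PUT",
      body,
      headers: { "Content-Length": body.byteLength.toString() },
    });
    if (res.ok) {
      onProgress?.(body.byteLength, body.byteLength);
      return { success: true };
    }
    const text = await res.text();
    lastError = `Upload failed: ${res.status} ${text}`;
    // Only 403 AccessDenied is treated as transient; any other failure,
    // or the final attempt, returns immediately.
    if (!(res.status === 403 && text.includes("AccessDenied")) || attempt === maxRetries) {
      return { success: false, error: lastError };
    }
    // Linear backoff between attempts: 10 s, 15 s, 20 s, 25 s.
    await new Promise((resolve) => setTimeout(resolve, initialDelayMs + attempt * 5000));
  }
  return { success: false, error: lastError };
}
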
@@ -260,7 +260,7 @@ Examples:
  $ nemar dataset upload ./my-eeg-dataset
  $ nemar dataset upload ./ds -n "My EEG Study" -d "64-channel EEG data"
  $ nemar dataset upload ./ds --dry-run # Preview without uploading
- $ nemar dataset upload ./ds -j 16 # More parallel streams`).action(async(D,F)=>{let $=cD();if(!xD())console.log(z.red("Error: Not authenticated")),console.log("Run 'nemar auth login' first"),process.exit(1);if(!Wv())console.log(z.yellow("Sandbox training required")),console.log(),console.log("You must complete sandbox training before uploading real datasets."),console.log("This verifies your setup and familiarizes you with the workflow."),console.log(),console.log("Run sandbox training with:"),console.log(z.cyan(" nemar sandbox")),process.exit(1);let B=X5(D);if(!Q5(B))console.log(z.red(`Error: Path does not exist: ${B}`)),process.exit(1);let J=_("Checking prerequisites...").start(),Y=await G$();if(!Y.allPassed){J.fail("Prerequisites check failed"),console.log();for(let U of Y.errors)console.log(z.red(` - ${U}`));process.exit(1)}if(J.succeed("Prerequisites check passed"),console.log(z.gray(` DataLad ${Y.datalad.version}, git-annex ${Y.gitAnnex.version}`)),Y.githubSSH.username)console.log(z.gray(` GitHub SSH: ${Y.githubSSH.username}`));if(console.log(),!F.skipValidation){J=_("Validating BIDS dataset...").start();let U=X5(B,"dataset_description.json");if(!Q5(U))J.fail("Not a valid BIDS dataset"),console.log(z.red("Missing required file: dataset_description.json")),process.exit(1);if(!(await I$()).installed)J.fail("Deno is required for BIDS validation"),console.log(),console.log(z.red("Error: Deno is not installed")),console.log(),console.log("The BIDS validator requires Deno runtime to run."),console.log("Install Deno with one of these commands:"),console.log(),console.log(z.cyan(" # macOS/Linux (curl)")),console.log(" curl -fsSL https://deno.land/install.sh | sh"),console.log(),console.log(z.cyan(" # macOS (Homebrew)")),console.log(" brew install deno"),console.log(),console.log(z.cyan(" # Windows (PowerShell)")),console.log(" irm https://deno.land/install.ps1 | iex"),console.log(),console.log("Learn more: https://docs.deno.com/runtime/getting_started/installation/"),console.log(),console.log(z.gray("To skip validation (not recommended): nemar dataset upload --skip-validation")),process.exit(1);try{let I=await UE(B,{prune:!0});if(!I.valid)J.fail("Dataset has validation errors"),console.log(),console.log(ME(I)),console.log(),console.log(z.yellow("Fix the errors above before uploading.")),console.log(z.gray("Or use --skip-validation to upload anyway (not recommended).")),process.exit(1);J.succeed(`Dataset is valid BIDS (${I.warningCount} warnings)`)}catch(I){J.fail("Validation failed"),console.log(z.red(I.message)),process.exit(1)}console.log()}J=_("Analyzing dataset files...").start();let Q=F.name||iED(B),X=await nv(B);if(J.succeed(`Found ${X.files.length} files (${X.dataFiles} data, ${X.metadataFiles} metadata)`),console.log(),console.log(z.bold("Upload Plan:")),console.log(` Name: ${Q}`),console.log(` Path: ${B}`),console.log(` Files: ${X.files.length}`),console.log(` Size: ${V$(X.totalSize)}`),console.log(` Data files: ${X.dataFiles} (will be uploaded to S3)`),console.log(` Metadata files: ${X.metadataFiles} (will be stored in git)`),console.log(` Parallel jobs: ${F.jobs}`),console.log(),F.dryRun){console.log(z.yellow("Dry run mode - no changes made"));return}if(!F.yes){let{confirmed:U}=await vD.prompt([{type:"confirm",name:"confirmed",message:"Proceed with upload?",default:!0}]);if(!U){console.log("Upload cancelled.");return}}console.log(),J=_("Creating dataset in NEMAR...").start();let E=X.files.filter((U)=>U.type==="data"),G;try{let U=await X$({name:Q,description:F.description,files:E.map((L)=>({path:L.path,size:L.size,type:L.type}))});G={dataset_id:U.dataset.dataset_id,ssh_url:U.dataset.ssh_url,s3_prefix:U.dataset.s3_prefix,github_url:U.dataset.github_url,upload_urls:U.upload_urls||{},s3_config:U.s3_config},J.succeed(`Dataset created: ${G.dataset_id}`)}catch(U){if(J.fail("Failed to create dataset"),U instanceof YD)console.log(z.red(` ${U.message}`));else console.log(z.red(` ${U.message}`));process.exit(1)}if(J=_("Initializing DataLad dataset...").start(),!await KE(B)){let U=await W$(B);if(!U.success)J.fail("Failed to initialize DataLad dataset"),console.log(z.red(` ${U.error}`)),process.exit(1)}let q=await hv(B);if(!q.success)J.fail("Failed to initialize git-annex"),console.log(z.red(` ${q.error}`)),process.exit(1);let V=await H$(B);if(!V.success)J.warn("Could not configure largefiles pattern"),console.log(z.gray(` ${V.error}`));J.succeed("DataLad dataset initialized"),J=_("Configuring GitHub remote...").start();let A=await q$(B,G.ssh_url,$.githubUsername);if(!A.success)J.fail("Failed to configure GitHub remote"),console.log(z.red(` ${A.error}`)),process.exit(1);J.succeed("GitHub remote configured");let N=Object.keys(G.upload_urls).length;if(N>0){J=_(`Uploading ${N} data files to S3...`).start();let U=0,L=N,I=await A$(B,G.upload_urls,{jobs:Number.parseInt(F.jobs,10),onProgress:(V0)=>{if(V0.status==="completed")U++,J.text=`Uploading data files to S3... (${U}/${L})`}});if(!I.success){J.fail(`Failed to upload some files (${I.failed.length} failed)`);for(let V0 of I.failed.slice(0,5))console.log(z.red(` - ${V0}`));if(I.failed.length>5)console.log(z.red(` ... and ${I.failed.length-5} more`));process.exit(1)}J.succeed(`Uploaded ${I.uploaded} data files to S3`),J=_("Registering file URLs with git-annex...").start();let{s3_config:y,s3_prefix:w}={s3_config:G.s3_config,s3_prefix:G.s3_prefix},KD={};for(let V0 of Object.keys(G.upload_urls))KD[V0]=`${y.public_url}/${w}/${V0}`;let _D=await N$(B,KD);if(!_D.success)J.warn(`Some URLs could not be registered (${_D.failed.length} failed)`);else J.succeed(`Registered ${_D.registered} file URLs with git-annex`)}else console.log(z.gray("No data files to upload to S3"));J=_("Saving dataset changes...").start();let Z=await K$(B,"Initial NEMAR dataset upload");if(!Z.success)J.fail("Failed to save dataset"),console.log(z.red(` ${Z.error}`)),process.exit(1);J.succeed("Dataset changes saved"),J=_("Pushing metadata to GitHub...").start();let M=await z$(B);if(!M.success)J.fail("Failed to push to GitHub"),console.log(z.red(` ${M.error}`)),process.exit(1);J.succeed("Metadata pushed to GitHub"),J=_("Finalizing dataset...").start();try{await E$(G.dataset_id),J.succeed("Dataset finalized")}catch(U){J.warn("Could not finalize dataset (branch protection may need manual setup)"),console.log(z.gray(` ${U.message}`))}console.log(),console.log(z.green.bold("Upload complete!")),console.log(),console.log(` Dataset ID: ${z.cyan(G.dataset_id)}`),console.log(` GitHub: ${z.cyan(G.github_url)}`),console.log(),console.log(z.gray("To clone this dataset:")),console.log(z.gray(` datalad clone ${G.ssh_url}`))});Q1.command("download").description("Download a dataset from NEMAR").argument("<dataset-id>","Dataset ID (e.g., nm000104)").option("-o, --output <path>","Output directory (default: ./<dataset-id>)").option("-j, --jobs <number>","Parallel download streams (default: 4)","4").option("--no-data","Download metadata only (skip large data files)").addHelpText("after",`
+ $ nemar dataset upload ./ds -j 16 # More parallel streams`).action(async(D,F)=>{let $=cD();if(!xD())console.log(z.red("Error: Not authenticated")),console.log("Run 'nemar auth login' first"),process.exit(1);if(!Wv())console.log(z.yellow("Sandbox training required")),console.log(),console.log("You must complete sandbox training before uploading real datasets."),console.log("This verifies your setup and familiarizes you with the workflow."),console.log(),console.log("Run sandbox training with:"),console.log(z.cyan(" nemar sandbox")),process.exit(1);let B=X5(D);if(!Q5(B))console.log(z.red(`Error: Path does not exist: ${B}`)),process.exit(1);let J=_("Checking prerequisites...").start(),Y=await G$();if(!Y.allPassed){J.fail("Prerequisites check failed"),console.log();for(let U of Y.errors)console.log(z.red(` - ${U}`));process.exit(1)}if(J.succeed("Prerequisites check passed"),console.log(z.gray(` DataLad ${Y.datalad.version}, git-annex ${Y.gitAnnex.version}`)),Y.githubSSH.username)console.log(z.gray(` GitHub SSH: ${Y.githubSSH.username}`));if(console.log(),!F.skipValidation){J=_("Validating BIDS dataset...").start();let U=X5(B,"dataset_description.json");if(!Q5(U))J.fail("Not a valid BIDS dataset"),console.log(z.red("Missing required file: dataset_description.json")),process.exit(1);if(!(await I$()).installed)J.fail("Deno is required for BIDS validation"),console.log(),console.log(z.red("Error: Deno is not installed")),console.log(),console.log("The BIDS validator requires Deno runtime to run."),console.log("Install Deno with one of these commands:"),console.log(),console.log(z.cyan(" # macOS/Linux (curl)")),console.log(" curl -fsSL https://deno.land/install.sh | sh"),console.log(),console.log(z.cyan(" # macOS (Homebrew)")),console.log(" brew install deno"),console.log(),console.log(z.cyan(" # Windows (PowerShell)")),console.log(" irm https://deno.land/install.ps1 | iex"),console.log(),console.log("Learn more: https://docs.deno.com/runtime/getting_started/installation/"),console.log(),console.log(z.gray("To skip validation (not recommended): nemar dataset upload --skip-validation")),process.exit(1);try{let I=await UE(B,{prune:!0});if(!I.valid)J.fail("Dataset has validation errors"),console.log(),console.log(ME(I)),console.log(),console.log(z.yellow("Fix the errors above before uploading.")),console.log(z.gray("Or use --skip-validation to upload anyway (not recommended).")),process.exit(1);J.succeed(`Dataset is valid BIDS (${I.warningCount} warnings)`)}catch(I){J.fail("Validation failed"),console.log(z.red(I.message)),process.exit(1)}console.log()}J=_("Analyzing dataset files...").start();let Q=F.name||iED(B),X=await nv(B);if(J.succeed(`Found ${X.files.length} files (${X.dataFiles} data, ${X.metadataFiles} metadata)`),console.log(),console.log(z.bold("Upload Plan:")),console.log(` Name: ${Q}`),console.log(` Path: ${B}`),console.log(` Files: ${X.files.length}`),console.log(` Size: ${V$(X.totalSize)}`),console.log(` Data files: ${X.dataFiles} (will be uploaded to S3)`),console.log(` Metadata files: ${X.metadataFiles} (will be stored in git)`),console.log(` Parallel jobs: ${F.jobs}`),console.log(),F.dryRun){console.log(z.yellow("Dry run mode - no changes made"));return}if(!F.yes){let{confirmed:U}=await vD.prompt([{type:"confirm",name:"confirmed",message:"Proceed with upload?",default:!0}]);if(!U){console.log("Upload cancelled.");return}}console.log(),J=_("Creating dataset in NEMAR...").start();let E=X.files.filter((U)=>U.type==="data"),G;try{let U=await X$({name:Q,description:F.description,files:E.map((L)=>({path:L.path,size:L.size,type:L.type}))});G={dataset_id:U.dataset.dataset_id,ssh_url:U.dataset.ssh_url,s3_prefix:U.dataset.s3_prefix,github_url:U.dataset.github_url,upload_urls:U.upload_urls||{},s3_config:U.s3_config},J.succeed(`Dataset created: ${G.dataset_id}`),await new Promise((L)=>setTimeout(L,1e4))}catch(U){if(J.fail("Failed to create dataset"),U instanceof YD)console.log(z.red(` ${U.message}`));else console.log(z.red(` ${U.message}`));process.exit(1)}if(J=_("Initializing DataLad dataset...").start(),!await KE(B)){let U=await W$(B);if(!U.success)J.fail("Failed to initialize DataLad dataset"),console.log(z.red(` ${U.error}`)),process.exit(1)}let q=await hv(B);if(!q.success)J.fail("Failed to initialize git-annex"),console.log(z.red(` ${q.error}`)),process.exit(1);let V=await H$(B);if(!V.success)J.warn("Could not configure largefiles pattern"),console.log(z.gray(` ${V.error}`));J.succeed("DataLad dataset initialized"),J=_("Configuring GitHub remote...").start();let A=await q$(B,G.ssh_url,$.githubUsername);if(!A.success)J.fail("Failed to configure GitHub remote"),console.log(z.red(` ${A.error}`)),process.exit(1);J.succeed("GitHub remote configured");let N=Object.keys(G.upload_urls).length;if(N>0){J=_(`Uploading ${N} data files to S3...`).start();let U=0,L=N,I=await A$(B,G.upload_urls,{jobs:Number.parseInt(F.jobs,10),onProgress:(V0)=>{if(V0.status==="completed")U++,J.text=`Uploading data files to S3... (${U}/${L})`}});if(!I.success){J.fail(`Failed to upload some files (${I.failed.length} failed)`);for(let V0 of I.failed.slice(0,5))console.log(z.red(` - ${V0}`));if(I.failed.length>5)console.log(z.red(` ... and ${I.failed.length-5} more`));process.exit(1)}J.succeed(`Uploaded ${I.uploaded} data files to S3`),J=_("Registering file URLs with git-annex...").start();let{s3_config:y,s3_prefix:w}={s3_config:G.s3_config,s3_prefix:G.s3_prefix},KD={};for(let V0 of Object.keys(G.upload_urls))KD[V0]=`${y.public_url}/${w}/${V0}`;let _D=await N$(B,KD);if(!_D.success)J.warn(`Some URLs could not be registered (${_D.failed.length} failed)`);else J.succeed(`Registered ${_D.registered} file URLs with git-annex`)}else console.log(z.gray("No data files to upload to S3"));J=_("Saving dataset changes...").start();let Z=await K$(B,"Initial NEMAR dataset upload");if(!Z.success)J.fail("Failed to save dataset"),console.log(z.red(` ${Z.error}`)),process.exit(1);J.succeed("Dataset changes saved"),J=_("Pushing metadata to GitHub...").start();let M=await z$(B);if(!M.success)J.fail("Failed to push to GitHub"),console.log(z.red(` ${M.error}`)),process.exit(1);J.succeed("Metadata pushed to GitHub"),J=_("Finalizing dataset...").start();try{await E$(G.dataset_id),J.succeed("Dataset finalized")}catch(U){J.warn("Could not finalize dataset (branch protection may need manual setup)"),console.log(z.gray(` ${U.message}`))}console.log(),console.log(z.green.bold("Upload complete!")),console.log(),console.log(` Dataset ID: ${z.cyan(G.dataset_id)}`),console.log(` GitHub: ${z.cyan(G.github_url)}`),console.log(),console.log(z.gray("To clone this dataset:")),console.log(z.gray(` datalad clone ${G.ssh_url}`))});Q1.command("download").description("Download a dataset from NEMAR").argument("<dataset-id>","Dataset ID (e.g., nm000104)").option("-o, --output <path>","Output directory (default: ./<dataset-id>)").option("-j, --jobs <number>","Parallel download streams (default: 4)","4").option("--no-data","Download metadata only (skip large data files)").addHelpText("after",`
  Description:
  Download a BIDS dataset from NEMAR. Uses DataLad/git-annex for efficient
  data transfer with parallel streams.
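
The only change in this hunk is one added expression in the upload action: after "Dataset created" succeeds, the new build now waits before moving on to DataLad initialization and GitHub remote setup (await new Promise((L)=>setTimeout(L,1e4))). A minimal sketch of that step follows; the helper name is illustrative, and the stated motive (letting server-side provisioning settle) is an inference, not documented in the bundle.

// Sketch only: fixed pause inserted after dataset creation.
async function settleAfterCreate(delayMs = 10_000): Promise<void> {
  // Presumably gives the newly created GitHub repository and S3 state
  // time to propagate before the CLI configures the remote and pushes.
  await new Promise<void>((resolve) => setTimeout(resolve, delayMs));
}

The sandbox flow in the next hunk gets the matching treatment: its existing post-create pause grows from 3000 ms (setTimeout(Z,3000)) to the same 10 s (setTimeout(Z,1e4)).
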
@@ -355,7 +355,7 @@ Examples:
  $ nemar sandbox # Run sandbox training
  $ nemar sandbox status # Check if training is completed
  $ nemar sandbox reset # Reset for re-training
- `).action(eED);async function eED(){if(console.log(),console.log(z.bold("NEMAR Sandbox Training")),console.log(z.gray("Verify your setup and learn the upload workflow")),console.log(),!xD()){console.log(z.red("Not authenticated")),console.log(z.gray("Run 'nemar auth login' first"));return}let D=cD();if(D.sandboxCompleted){console.log(z.green("Sandbox training already completed!")),console.log(z.gray(`Dataset ID: ${D.sandboxDatasetId}`)),console.log(),console.log("You can upload real datasets with:"),console.log(z.cyan(" nemar dataset upload ./your-dataset")),console.log(),console.log(z.gray("To re-run training, use: nemar sandbox reset"));return}console.log(z.bold("Step 1/6: Checking prerequisites..."));let F=_("Checking DataLad, git-annex, and SSH...").start(),$=await G$();if(!$.allPassed){F.fail("Prerequisites check failed"),console.log(),console.log(z.red("Missing requirements:"));for(let N of $.errors)console.log(z.yellow(` - ${N}`));if(!$.githubSSH.accessible)console.log(z.gray(" Run 'nemar auth setup-ssh' to configure SSH"));return}F.succeed("All prerequisites met"),console.log(),console.log(z.bold("Step 2/6: Generating test dataset..."));let B=_("Creating minimal BIDS structure...").start(),J;try{let N=Fy();J=N.root;let Z=$y(N);B.succeed(`Test dataset created (${V$(Z)})`),console.log(z.gray(` Location: ${J}`))}catch(N){B.fail("Failed to generate test dataset"),console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`));return}console.log(),console.log(z.bold("Step 3/6: Registering sandbox dataset..."));let Y=_("Creating dataset on NEMAR...").start(),Q,X,E,G,W;try{let N=await X$({name:"Sandbox Training Dataset",description:"Placeholder dataset for sandbox training",files:[{path:"sub-01/eeg/sub-01_task-rest_eeg.edf",size:512000,type:"data"},{path:"dataset_description.json",size:200,type:"metadata"},{path:"participants.tsv",size:50,type:"metadata"},{path:"README",size:500,type:"metadata"},{path:"sub-01/eeg/sub-01_task-rest_eeg.json",size:300,type:"metadata"}],sandbox:!0});Q=N.dataset.dataset_id,X=N.dataset.ssh_url,E=N.s3_config,G=N.dataset.s3_prefix,W=N.upload_urls||{},Y.succeed(`Sandbox dataset created: ${z.cyan(Q)}`),console.log(z.gray(` GitHub: ${N.dataset.github_url}`)),await new Promise((Z)=>setTimeout(Z,3000))}catch(N){if(Y.fail("Failed to create sandbox dataset"),N instanceof YD)console.log(z.red(` ${N.message}`));else console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`));v1(J);return}console.log(),console.log(z.bold("Step 4/6: Initializing repository..."));let q=_("Setting up DataLad and git-annex...").start();try{await W$(J),await H$(J),await q$(J,X),q.succeed("Repository initialized")}catch(N){q.fail("Failed to initialize repository"),console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`)),v1(J);return}if(console.log(),console.log(z.bold("Step 5/6: Uploading to S3...")),Object.keys(W).length===0)console.log(z.yellow(" No data files to upload (metadata only)"));else{let N=_("Uploading test data...").start();try{let M=0,U=Object.keys(W).length,L=await A$(J,W,{jobs:4,onProgress:(I)=>{if(I.status==="completed"||I.status==="failed")M++,N.text=`Uploading... ${M}/${U} files`}});if(L.failed.length>0){N.fail(`Upload failed for ${L.failed.length} file(s)`);for(let I of L.failed)console.log(z.red(` Failed: ${I}`));if(L.error)console.log(z.red(` Error: ${L.error}`));console.log(),console.log(z.yellow("Sandbox training aborted due to upload failures.")),console.log(z.gray("Please check your network connection and try again.")),v1(J);return}N.succeed(`Uploaded ${L.uploaded} file(s)`)}catch(M){N.fail("Upload failed"),console.log(z.red(` ${M instanceof Error?M.message:"Unknown error"}`)),v1(J);return}let Z=_("Registering file URLs...").start();try{let M={};for(let L of Object.keys(W))M[L]=`${E.public_url}/${G}/${L}`;let U=await N$(J,M);if(!U.success){Z.fail(`URL registration failed for ${U.failed.length} file(s)`);for(let L of U.failed)console.log(z.red(` Failed: ${L}`));console.log(),console.log(z.yellow("Sandbox training aborted due to URL registration failures.")),console.log(z.gray("This may indicate a git-annex configuration issue.")),v1(J);return}Z.succeed(`Registered ${U.registered} file URLs`)}catch(M){Z.fail("Failed to register URLs"),console.log(z.red(` ${M instanceof Error?M.message:"Unknown error"}`)),v1(J);return}}console.log(),console.log(z.bold("Step 6/6: Pushing to GitHub..."));let V=_("Saving and pushing...").start();try{await K$(J,"Initial sandbox training upload"),await z$(J),V.succeed("Pushed to GitHub")}catch(N){V.fail("Failed to push to GitHub"),console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`)),v1(J);return}let A=_("Finalizing...").start();try{await E$(Q),await Pv(Q),nD("sandboxCompleted",!0),nD("sandboxDatasetId",Q),A.succeed("Sandbox training complete!")}catch(N){A.fail("Failed to finalize"),console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`)),v1(J);return}v1(J),console.log(),console.log(z.green.bold("Congratulations! Sandbox training completed successfully.")),console.log(),console.log("Your setup is verified and you're ready to upload real datasets:"),console.log(z.cyan(" nemar dataset upload ./your-dataset")),console.log(),console.log(z.gray(`Sandbox dataset: ${Q}`))}O$.command("status").description("Check sandbox training completion status").option("--refresh","Fetch latest status from server").action(async(D)=>{if(!xD()){console.log(z.red("Not authenticated")),console.log(z.gray("Run 'nemar auth login' first"));return}if(D.refresh){let F=_("Checking status...").start();try{let $=await kv();if(nD("sandboxCompleted",$.sandbox_completed),$.sandbox_dataset_id)nD("sandboxDatasetId",$.sandbox_dataset_id);if(F.stop(),$.sandbox_completed){if(console.log(z.green("Sandbox training: Completed")),console.log(z.gray(` Dataset ID: ${$.sandbox_dataset_id}`)),$.sandbox_completed_at)console.log(z.gray(` Completed: ${$.sandbox_completed_at}`))}else console.log(z.yellow("Sandbox training: Not completed")),console.log(),console.log("Run sandbox training with:"),console.log(z.cyan(" nemar sandbox"))}catch($){if(F.fail("Failed to check status"),$ instanceof YD)console.log(z.red(` ${$.message}`))}}else{let F=cD();if(F.sandboxCompleted)console.log(z.green("Sandbox training: Completed")),console.log(z.gray(` Dataset ID: ${F.sandboxDatasetId}`));else console.log(z.yellow("Sandbox training: Not completed")),console.log(),console.log("Run sandbox training with:"),console.log(z.cyan(" nemar sandbox"))}});O$.command("reset").description("Reset sandbox training status for re-training").option("-f, --force","Skip confirmation prompt").action(async(D)=>{if(!xD()){console.log(z.red("Not authenticated")),console.log(z.gray("Run 'nemar auth login' first"));return}if(!cD().sandboxCompleted){console.log(z.yellow("Sandbox training not yet completed")),console.log(z.gray("Nothing to reset"));return}if(!D.force){let B=(await Promise.resolve().then(() => (q3(),FI))).default,{confirm:J}=await B.prompt([{type:"confirm",name:"confirm",message:"Reset sandbox training status? You will need to complete training again.",default:!1}]);if(!J){console.log(z.gray("Cancelled"));return}}let $=_("Resetting sandbox status...").start();try{await Sv(),nD("sandboxCompleted",!1),nD("sandboxDatasetId",void 0),$.succeed("Sandbox status reset"),console.log(),console.log("Run sandbox training again with:"),console.log(z.cyan(" nemar sandbox"))}catch(B){if($.fail("Failed to reset"),B instanceof YD)console.log(z.red(` ${B.message}`));else console.log(z.red(` ${B instanceof Error?B.message:"Unknown error"}`))}});var By={name:"nemar-cli",version:"0.3.3-dev.44",description:"CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource) dataset management",type:"module",main:"dist/index.js",bin:{nemar:"dist/index.js"},scripts:{dev:"bun run src/index.ts",build:"bun build src/index.ts --outdir dist --target bun --minify && sed '1s|#!/usr/bin/env node|#!/usr/bin/env bun|' dist/index.js > dist/index.js.tmp && mv dist/index.js.tmp dist/index.js",test:"bun test",lint:"biome check src/","lint:fix":"biome check --fix src/",format:"biome format --write src/",typecheck:"tsc --noEmit",prepublishOnly:"bun run build","docs:generate":"bun run scripts/generate-docs.ts","docs:serve":"mkdocs serve","docs:build":"mkdocs build"},keywords:["nemar","bids","neuroimaging","eeg","emg","datalad","cli"],author:"NEMAR Team",license:"MIT",repository:{type:"git",url:"git+https://github.com/nemarDatasets/nemar-cli.git"},bugs:{url:"https://github.com/nemarDatasets/nemar-cli/issues"},homepage:"https://nemar-cli.pages.dev",engines:{bun:">=1.0.0"},files:["dist","README.md","LICENSE"],dependencies:{chalk:"^5.3.0",commander:"^12.1.0",conf:"^13.0.1",inquirer:"^9.2.15",ora:"^8.0.1",zod:"^3.23.8"},devDependencies:{"@biomejs/biome":"^1.9.4","@types/bcryptjs":"^3.0.0","@types/bun":"latest","@types/inquirer":"^9.0.7",bcryptjs:"^3.0.3",typescript:"^5.5.4"}};var Jy=By.version;var X1=new g0;X1.name("nemar").description(`CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource)
+ `).action(eED);async function eED(){if(console.log(),console.log(z.bold("NEMAR Sandbox Training")),console.log(z.gray("Verify your setup and learn the upload workflow")),console.log(),!xD()){console.log(z.red("Not authenticated")),console.log(z.gray("Run 'nemar auth login' first"));return}let D=cD();if(D.sandboxCompleted){console.log(z.green("Sandbox training already completed!")),console.log(z.gray(`Dataset ID: ${D.sandboxDatasetId}`)),console.log(),console.log("You can upload real datasets with:"),console.log(z.cyan(" nemar dataset upload ./your-dataset")),console.log(),console.log(z.gray("To re-run training, use: nemar sandbox reset"));return}console.log(z.bold("Step 1/6: Checking prerequisites..."));let F=_("Checking DataLad, git-annex, and SSH...").start(),$=await G$();if(!$.allPassed){F.fail("Prerequisites check failed"),console.log(),console.log(z.red("Missing requirements:"));for(let N of $.errors)console.log(z.yellow(` - ${N}`));if(!$.githubSSH.accessible)console.log(z.gray(" Run 'nemar auth setup-ssh' to configure SSH"));return}F.succeed("All prerequisites met"),console.log(),console.log(z.bold("Step 2/6: Generating test dataset..."));let B=_("Creating minimal BIDS structure...").start(),J;try{let N=Fy();J=N.root;let Z=$y(N);B.succeed(`Test dataset created (${V$(Z)})`),console.log(z.gray(` Location: ${J}`))}catch(N){B.fail("Failed to generate test dataset"),console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`));return}console.log(),console.log(z.bold("Step 3/6: Registering sandbox dataset..."));let Y=_("Creating dataset on NEMAR...").start(),Q,X,E,G,W;try{let N=await X$({name:"Sandbox Training Dataset",description:"Placeholder dataset for sandbox training",files:[{path:"sub-01/eeg/sub-01_task-rest_eeg.edf",size:512000,type:"data"},{path:"dataset_description.json",size:200,type:"metadata"},{path:"participants.tsv",size:50,type:"metadata"},{path:"README",size:500,type:"metadata"},{path:"sub-01/eeg/sub-01_task-rest_eeg.json",size:300,type:"metadata"}],sandbox:!0});Q=N.dataset.dataset_id,X=N.dataset.ssh_url,E=N.s3_config,G=N.dataset.s3_prefix,W=N.upload_urls||{},Y.succeed(`Sandbox dataset created: ${z.cyan(Q)}`),console.log(z.gray(` GitHub: ${N.dataset.github_url}`)),await new Promise((Z)=>setTimeout(Z,1e4))}catch(N){if(Y.fail("Failed to create sandbox dataset"),N instanceof YD)console.log(z.red(` ${N.message}`));else console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`));v1(J);return}console.log(),console.log(z.bold("Step 4/6: Initializing repository..."));let q=_("Setting up DataLad and git-annex...").start();try{await W$(J),await H$(J),await q$(J,X),q.succeed("Repository initialized")}catch(N){q.fail("Failed to initialize repository"),console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`)),v1(J);return}if(console.log(),console.log(z.bold("Step 5/6: Uploading to S3...")),Object.keys(W).length===0)console.log(z.yellow(" No data files to upload (metadata only)"));else{let N=_("Uploading test data...").start();try{let M=0,U=Object.keys(W).length,L=await A$(J,W,{jobs:4,onProgress:(I)=>{if(I.status==="completed"||I.status==="failed")M++,N.text=`Uploading... ${M}/${U} files`}});if(L.failed.length>0){N.fail(`Upload failed for ${L.failed.length} file(s)`);for(let I of L.failed)console.log(z.red(` Failed: ${I}`));if(L.error)console.log(z.red(` Error: ${L.error}`));console.log(),console.log(z.yellow("Sandbox training aborted due to upload failures.")),console.log(z.gray("Please check your network connection and try again.")),v1(J);return}N.succeed(`Uploaded ${L.uploaded} file(s)`)}catch(M){N.fail("Upload failed"),console.log(z.red(` ${M instanceof Error?M.message:"Unknown error"}`)),v1(J);return}let Z=_("Registering file URLs...").start();try{let M={};for(let L of Object.keys(W))M[L]=`${E.public_url}/${G}/${L}`;let U=await N$(J,M);if(!U.success){Z.fail(`URL registration failed for ${U.failed.length} file(s)`);for(let L of U.failed)console.log(z.red(` Failed: ${L}`));console.log(),console.log(z.yellow("Sandbox training aborted due to URL registration failures.")),console.log(z.gray("This may indicate a git-annex configuration issue.")),v1(J);return}Z.succeed(`Registered ${U.registered} file URLs`)}catch(M){Z.fail("Failed to register URLs"),console.log(z.red(` ${M instanceof Error?M.message:"Unknown error"}`)),v1(J);return}}console.log(),console.log(z.bold("Step 6/6: Pushing to GitHub..."));let V=_("Saving and pushing...").start();try{await K$(J,"Initial sandbox training upload"),await z$(J),V.succeed("Pushed to GitHub")}catch(N){V.fail("Failed to push to GitHub"),console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`)),v1(J);return}let A=_("Finalizing...").start();try{await E$(Q),await Pv(Q),nD("sandboxCompleted",!0),nD("sandboxDatasetId",Q),A.succeed("Sandbox training complete!")}catch(N){A.fail("Failed to finalize"),console.log(z.red(` ${N instanceof Error?N.message:"Unknown error"}`)),v1(J);return}v1(J),console.log(),console.log(z.green.bold("Congratulations! Sandbox training completed successfully.")),console.log(),console.log("Your setup is verified and you're ready to upload real datasets:"),console.log(z.cyan(" nemar dataset upload ./your-dataset")),console.log(),console.log(z.gray(`Sandbox dataset: ${Q}`))}O$.command("status").description("Check sandbox training completion status").option("--refresh","Fetch latest status from server").action(async(D)=>{if(!xD()){console.log(z.red("Not authenticated")),console.log(z.gray("Run 'nemar auth login' first"));return}if(D.refresh){let F=_("Checking status...").start();try{let $=await kv();if(nD("sandboxCompleted",$.sandbox_completed),$.sandbox_dataset_id)nD("sandboxDatasetId",$.sandbox_dataset_id);if(F.stop(),$.sandbox_completed){if(console.log(z.green("Sandbox training: Completed")),console.log(z.gray(` Dataset ID: ${$.sandbox_dataset_id}`)),$.sandbox_completed_at)console.log(z.gray(` Completed: ${$.sandbox_completed_at}`))}else console.log(z.yellow("Sandbox training: Not completed")),console.log(),console.log("Run sandbox training with:"),console.log(z.cyan(" nemar sandbox"))}catch($){if(F.fail("Failed to check status"),$ instanceof YD)console.log(z.red(` ${$.message}`))}}else{let F=cD();if(F.sandboxCompleted)console.log(z.green("Sandbox training: Completed")),console.log(z.gray(` Dataset ID: ${F.sandboxDatasetId}`));else console.log(z.yellow("Sandbox training: Not completed")),console.log(),console.log("Run sandbox training with:"),console.log(z.cyan(" nemar sandbox"))}});O$.command("reset").description("Reset sandbox training status for re-training").option("-f, --force","Skip confirmation prompt").action(async(D)=>{if(!xD()){console.log(z.red("Not authenticated")),console.log(z.gray("Run 'nemar auth login' first"));return}if(!cD().sandboxCompleted){console.log(z.yellow("Sandbox training not yet completed")),console.log(z.gray("Nothing to reset"));return}if(!D.force){let B=(await Promise.resolve().then(() => (q3(),FI))).default,{confirm:J}=await B.prompt([{type:"confirm",name:"confirm",message:"Reset sandbox training status? You will need to complete training again.",default:!1}]);if(!J){console.log(z.gray("Cancelled"));return}}let $=_("Resetting sandbox status...").start();try{await Sv(),nD("sandboxCompleted",!1),nD("sandboxDatasetId",void 0),$.succeed("Sandbox status reset"),console.log(),console.log("Run sandbox training again with:"),console.log(z.cyan(" nemar sandbox"))}catch(B){if($.fail("Failed to reset"),B instanceof YD)console.log(z.red(` ${B.message}`));else console.log(z.red(` ${B instanceof Error?B.message:"Unknown error"}`))}});var By={name:"nemar-cli",version:"0.3.3-dev.47",description:"CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource) dataset management",type:"module",main:"dist/index.js",bin:{nemar:"dist/index.js"},scripts:{dev:"bun run src/index.ts",build:"bun build src/index.ts --outdir dist --target bun --minify && sed '1s|#!/usr/bin/env node|#!/usr/bin/env bun|' dist/index.js > dist/index.js.tmp && mv dist/index.js.tmp dist/index.js",test:"bun test",lint:"biome check src/","lint:fix":"biome check --fix src/",format:"biome format --write src/",typecheck:"tsc --noEmit",prepublishOnly:"bun run build","docs:generate":"bun run scripts/generate-docs.ts","docs:serve":"mkdocs serve","docs:build":"mkdocs build"},keywords:["nemar","bids","neuroimaging","eeg","emg","datalad","cli"],author:"NEMAR Team",license:"MIT",repository:{type:"git",url:"git+https://github.com/nemarDatasets/nemar-cli.git"},bugs:{url:"https://github.com/nemarDatasets/nemar-cli/issues"},homepage:"https://nemar-cli.pages.dev",engines:{bun:">=1.0.0"},files:["dist","README.md","LICENSE"],dependencies:{chalk:"^5.3.0",commander:"^12.1.0",conf:"^13.0.1",inquirer:"^9.2.15",ora:"^8.0.1",zod:"^3.23.8"},devDependencies:{"@biomejs/biome":"^1.9.4","@types/bcryptjs":"^3.0.0","@types/bun":"latest","@types/inquirer":"^9.0.7",bcryptjs:"^3.0.3",typescript:"^5.5.4"}};var Jy=By.version;var X1=new g0;X1.name("nemar").description(`CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource)
 
  NEMAR is a curated repository for neurophysiology data in BIDS format.
  This CLI provides tools for uploading, downloading, and managing datasets.`).version(Jy,"-v, --version","Output the current version").option("--no-color","Disable colored output").option("--verbose","Enable verbose output").addHelpText("after",`
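
Step 5 of the sandbox flow shown above (like the real upload path) relies on the bundle's URL-registration helpers (fED/N$ in the minified source): once a file's content lands in S3, its public URL is attached to the file's git-annex key so later clones can fetch the content directly. A sketch of that sequence using Bun's shell API; the bundle actually goes through its own spawn wrapper (QD), and registerPublicUrl is an illustrative name.

// Sketch only: how a public S3 URL is recorded for an annexed file.
import { $ } from "bun";

async function registerPublicUrl(repo: string, file: string, url: string): Promise<void> {
  // git-annex identifies content by key, not by path; look the key up first.
  const key = (await $`git annex lookupkey ${file}`.cwd(repo).text()).trim();
  if (!key) throw new Error(`Could not get git-annex key for ${file}`);
  // registerurl tells git-annex the content is retrievable from this URL,
  // so a later `datalad get` works without credentials or a special remote.
  await $`git annex registerurl ${key} ${url}`.cwd(repo);
}
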
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "nemar-cli",
- "version": "0.3.3-dev.44",
+ "version": "0.3.3-dev.47",
  "description": "CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource) dataset management",
  "type": "module",
  "main": "dist/index.js",