nemar-cli 0.3.11-dev.170 → 0.3.11-dev.172

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +2 -2
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -367,7 +367,7 @@ Examples:
  $ nemar dataset upload ./my-eeg-dataset
  $ nemar dataset upload ./ds -n "My EEG Study" -d "64-channel EEG data"
  $ nemar dataset upload ./ds --dry-run # Preview without uploading
- $ nemar dataset upload ./ds -j 16 # More parallel streams`).action(async(D,F)=>{let $=eD();if(!PD())console.log(E.red("Error: Not authenticated")),console.log("Run 'nemar auth login' first"),process.exit(1);if(!$v())console.log(E.yellow("Sandbox training required")),console.log(),console.log("You must complete sandbox training before uploading real datasets."),console.log("This verifies your setup and familiarizes you with the workflow."),console.log(),console.log("Run sandbox training with:"),console.log(E.cyan(" nemar sandbox")),process.exit(1);let B=MF(D);if(!LF(B))console.log(E.red(`Error: Path does not exist: ${B}`)),process.exit(1);let J=C("Checking prerequisites...").start(),Q=await A$();if(!Q.allPassed){J.fail("Prerequisites check failed"),console.log();for(let h of Q.errors)console.log(E.red(` - ${h}`));process.exit(1)}if(J.succeed("Prerequisites check passed"),console.log(E.gray(` git-annex ${Q.gitAnnex.version}`)),Q.githubSSH.username)console.log(E.gray(` GitHub SSH: ${Q.githubSSH.username}`));console.log(),J=C("Verifying GitHub CLI authentication...").start();let X=await L$($.githubUsername);if(!X.authenticated)J.fail("GitHub CLI not authenticated"),console.log(E.red(` ${X.error}`)),console.log(),console.log("GitHub CLI is required for dataset uploads. Install and authenticate:"),console.log(E.cyan(" brew install gh # or visit https://cli.github.com/")),console.log(E.cyan(" gh auth login")),process.exit(1);if($.githubUsername&&!X.matches)J.warn("GitHub CLI user mismatch"),console.log(E.yellow(` ${X.error}`)),console.log(),console.log("Your gh CLI is authenticated as a different GitHub account than your NEMAR account."),console.log("This may cause issues with repository access. To fix:"),console.log(E.cyan(` gh auth login # Login as ${$.githubUsername}`)),console.log(),console.log(E.yellow("WARNING: If upload fails with permission errors, this mismatch is the likely cause.")),console.log();else J.succeed(`GitHub CLI authenticated as ${X.username}`);if(!F.skipValidation){J=C("Validating BIDS dataset...").start();let h=MF(B,"dataset_description.json");if(!LF(h))J.fail("Not a valid BIDS dataset"),console.log(E.red("Missing required file: dataset_description.json")),process.exit(1);if(!(await k$()).installed)J.fail("Deno is required for BIDS validation"),console.log(),console.log(E.red("Error: Deno is not installed")),console.log(),console.log("The BIDS validator requires Deno runtime to run."),console.log("Install Deno with one of these commands:"),console.log(),console.log(E.cyan(" # macOS/Linux (curl)")),console.log(" curl -fsSL https://deno.land/install.sh | sh"),console.log(),console.log(E.cyan(" # macOS (Homebrew)")),console.log(" brew install deno"),console.log(),console.log(E.cyan(" # Windows (PowerShell)")),console.log(" irm https://deno.land/install.ps1 | iex"),console.log(),console.log("Learn more: https://docs.deno.com/runtime/getting_started/installation/"),console.log(),console.log(E.gray("To skip validation (not recommended): nemar dataset upload --skip-validation")),process.exit(1);try{let ED=await Wy(B,{prune:!0});if(!ED.valid)J.fail("Dataset has validation errors"),console.log(),console.log(qy(ED)),console.log(),console.log(E.yellow("Fix the errors above before uploading.")),console.log(E.gray("Or use --skip-validation to upload anyway (not recommended).")),process.exit(1);J.succeed(`Dataset is valid BIDS (${ED.warningCount} warnings)`)}catch(ED){J.fail("Validation failed"),console.log(E.red(ED.message)),process.exit(1)}console.log()}J=C("Analyzing dataset 
files...").start();let Y=F.name||UGD(B),G=await Dy(B);J.succeed(`Found ${G.files.length} files (${G.dataFiles} data, ${G.metadataFiles} metadata)`);let H=k2(B);if(console.log(),H)console.log(E.bold.yellow("Resume Upload:")),console.log(` Dataset ID: ${E.cyan(H.dataset_id)}`),console.log(` Last attempt: ${H.last_upload_at||H.created_at}`);else console.log(E.bold("Upload Plan:"));if(console.log(` Name: ${Y}`),console.log(` Path: ${B}`),console.log(` Files: ${G.files.length}`),console.log(` Size: ${C$(G.totalSize)}`),console.log(` Data files: ${G.dataFiles} (will be uploaded to S3)`),console.log(` Metadata files: ${G.metadataFiles} (will be stored in git)`),console.log(` Parallel jobs: ${F.jobs}`),console.log(),F.dryRun){console.log(E.yellow("Dry run mode - no changes made"));return}let W=await mD("Proceed with upload?",{yes:F.yes,no:F.no},!0);if(W!=="confirmed"){console.log(W==="declined"?"Upload skipped.":"Upload cancelled.");return}console.log();let K=G.files.filter((h)=>h.type==="data"),V;if(H!==null){J=C(`Resuming upload for ${H.dataset_id}...`).start();try{await X8(H.dataset_id);let h=await Rv(H.dataset_id,K.map((qD)=>qD.path));V={dataset_id:H.dataset_id,ssh_url:H.ssh_url,s3_prefix:H.s3_prefix,github_url:H.github_url,upload_urls:h.upload_urls,s3_config:H.s3_config},J.succeed(`Resuming upload: ${V.dataset_id}`)}catch(h){if(J.fail("Failed to resume upload"),h instanceof g){if(console.log(E.red(` ${h.message}`)),h.statusCode===404)console.log(E.yellow(" The dataset may have been deleted. Try uploading as a new dataset.")),console.log(E.gray(` Remove ${B}/.nemar to start fresh.`))}else console.log(E.red(` ${h.message}`));process.exit(1)}}else{J=C("Creating dataset in NEMAR...").start();try{let h=await z$({name:Y,description:F.description,files:K.map((ED)=>({path:ED.path,size:ED.size,type:ED.type}))});V={dataset_id:h.dataset.dataset_id,ssh_url:h.dataset.ssh_url,s3_prefix:h.dataset.s3_prefix,github_url:h.dataset.github_url,upload_urls:h.upload_urls||{},s3_config:h.s3_config};let qD={dataset_id:V.dataset_id,github_url:V.github_url,ssh_url:V.ssh_url,s3_prefix:V.s3_prefix,s3_config:V.s3_config,created_at:new Date().toISOString()};v2(B,qD),J.succeed(`Dataset created: ${V.dataset_id}`),await new Promise((ED)=>setTimeout(ED,1e4))}catch(h){if(J.fail("Failed to create dataset"),h instanceof g)console.log(E.red(` ${h.message}`));else console.log(E.red(` ${h.message}`));process.exit(1)}}J=C("Accepting GitHub repository invitation...").start();let Z=V.github_url?.match(/github\.com\/([a-zA-Z0-9_.-]+\/[a-zA-Z0-9_.-]+)/),U=Z?Z[1].replace(/\.git$/,""):null;if(!U)J.fail("Invalid GitHub repository URL from backend"),console.log(E.red(` Received: ${V.github_url||"(empty)"}`)),console.log(E.red(" Expected format: https://github.com/owner/repo")),console.log(),console.log("This may indicate a backend issue. 
Please contact support."),process.exit(1);let L=await M$(U);if(L.accepted)if(L.alreadyCollaborator)J.succeed("Already a collaborator on this repository");else J.succeed("GitHub invitation accepted");else J.warn("Could not auto-accept invitation"),console.log(E.yellow(` ${L.error}`)),console.log(),console.log("You may need to accept the invitation manually:"),console.log(E.cyan(` https://github.com/${U}/invitations`)),console.log();J=C("Initializing git-annex dataset...").start();let M=$.username&&$.email?{name:$.username,email:$.email}:void 0;if(!await Y8(B)){let h=await Z$(B,{author:M});if(!h.success)J.fail("Failed to initialize git-annex dataset"),console.log(E.red(` ${h.error}`)),process.exit(1)}let P=await pv(B);if(!P.success)J.fail("Failed to initialize git-annex"),console.log(E.red(` ${P.error}`)),process.exit(1);let u=await U$(B);if(!u.success)J.warn("Could not configure largefiles pattern"),console.log(E.gray(` ${u.error}`));J.succeed("git-annex dataset initialized"),J=C("Configuring GitHub remote...").start();let T=await N$(B,V.ssh_url);if(!T.success)J.fail("Failed to configure GitHub remote"),console.log(E.red(` ${T.error}`)),process.exit(1);J.succeed("GitHub remote configured");let O=Object.keys(V.upload_urls).length;if(O>0){J=C(`Uploading ${O} data files to S3...`).start();let h=0,qD=O,ED=await R$(B,V.upload_urls,{jobs:Number.parseInt(F.jobs,10),onProgress:(b6)=>{if(b6.status==="completed")h++,J.text=`Uploading data files to S3... (${h}/${qD})`}});if(!ED.success){J.fail(`Failed to upload some files (${ED.failed.length} failed)`);for(let b6 of ED.failed.slice(0,5))console.log(E.red(` - ${b6}`));if(ED.failed.length>5)console.log(E.red(` ... and ${ED.failed.length-5} more`));process.exit(1)}J.succeed(`Uploaded ${ED.uploaded} data files to S3`),J=C("Registering file URLs with git-annex...").start();let{s3_config:h6,s3_prefix:$6}={s3_config:V.s3_config,s3_prefix:V.s3_prefix},V5={};for(let b6 of Object.keys(V.upload_urls))V5[b6]=`${h6.public_url}/${$6}/${b6}`;let x$=await w$(B,V5);if(!x$.success)J.warn(`Some URLs could not be registered (${x$.failed.length} failed)`);else J.succeed(`Registered ${x$.registered} file URLs with git-annex`)}else console.log(E.gray("No data files to upload to S3"));J=C("Saving dataset changes...").start();let YD=await W5(B,"Initial NEMAR dataset upload",M);if(!YD.success)J.fail("Failed to save dataset"),console.log(E.red(` ${YD.error}`)),process.exit(1);J.succeed("Dataset changes saved"),J=C("Pushing metadata to GitHub...").start();let yD=await E5(B);if(!yD.success)J.fail("Failed to push to GitHub"),console.log(E.red(` ${yD.error}`)),process.exit(1);J.succeed("Metadata pushed to GitHub"),J=C("Setting up BIDS validation CI...").start();try{await K$(V.dataset_id),J.succeed("BIDS validation CI configured")}catch(h){if(h instanceof g&&h.statusCode===403)J.info("CI workflow will be configured by an admin");else{let qD=h instanceof Error?h.message:String(h);J.warn(`Could not configure CI: ${qD}`),console.log(E.gray(` An admin can add it later with: nemar admin ci add ${V.dataset_id}`))}}Ay(B),console.log(),console.log(E.green.bold("Upload complete!")),console.log(),console.log(` Dataset ID: ${E.cyan(V.dataset_id)}`),console.log(` GitHub: ${E.cyan(V.github_url)}`),console.log(),console.log(E.gray("To clone this dataset:")),console.log(E.gray(` git clone ${V.ssh_url}`))});gD.command("download").description("Download a dataset from NEMAR").argument("<dataset-id>","Dataset ID (e.g., nm000104)").option("-o, --output <path>","Output directory (default: 
./<dataset-id>)").option("-j, --jobs <number>","Parallel download streams (default: 4)","4").option("--no-data","Download metadata only (skip large data files)").addHelpText("after",`
+ $ nemar dataset upload ./ds -j 16 # More parallel streams`).action(async(D,F)=>{let $=eD();if(!PD())console.log(E.red("Error: Not authenticated")),console.log("Run 'nemar auth login' first"),process.exit(1);if(!$v())console.log(E.yellow("Sandbox training required")),console.log(),console.log("You must complete sandbox training before uploading real datasets."),console.log("This verifies your setup and familiarizes you with the workflow."),console.log(),console.log("Run sandbox training with:"),console.log(E.cyan(" nemar sandbox")),process.exit(1);let B=MF(D);if(!LF(B))console.log(E.red(`Error: Path does not exist: ${B}`)),process.exit(1);let J=C("Checking prerequisites...").start(),Q=await A$();if(!Q.allPassed){J.fail("Prerequisites check failed"),console.log();for(let h of Q.errors)console.log(E.red(` - ${h}`));process.exit(1)}if(J.succeed("Prerequisites check passed"),console.log(E.gray(` git-annex ${Q.gitAnnex.version}`)),Q.githubSSH.username)console.log(E.gray(` GitHub SSH: ${Q.githubSSH.username}`));console.log(),J=C("Verifying GitHub CLI authentication...").start();let X=await L$($.githubUsername);if(!X.authenticated)J.fail("GitHub CLI not authenticated"),console.log(E.red(` ${X.error}`)),console.log(),console.log("GitHub CLI is required for dataset uploads. Install and authenticate:"),console.log(E.cyan(" brew install gh # or visit https://cli.github.com/")),console.log(E.cyan(" gh auth login")),process.exit(1);if($.githubUsername&&!X.matches)J.warn("GitHub CLI user mismatch"),console.log(E.yellow(` ${X.error}`)),console.log(),console.log("Your gh CLI is authenticated as a different GitHub account than your NEMAR account."),console.log("This may cause issues with repository access. To fix:"),console.log(E.cyan(` gh auth login # Login as ${$.githubUsername}`)),console.log(),console.log(E.yellow("WARNING: If upload fails with permission errors, this mismatch is the likely cause.")),console.log();else J.succeed(`GitHub CLI authenticated as ${X.username}`);if(!F.skipValidation){J=C("Validating BIDS dataset...").start();let h=MF(B,"dataset_description.json");if(!LF(h))J.fail("Not a valid BIDS dataset"),console.log(E.red("Missing required file: dataset_description.json")),process.exit(1);if(!(await k$()).installed)J.fail("Deno is required for BIDS validation"),console.log(),console.log(E.red("Error: Deno is not installed")),console.log(),console.log("The BIDS validator requires Deno runtime to run."),console.log("Install Deno with one of these commands:"),console.log(),console.log(E.cyan(" # macOS/Linux (curl)")),console.log(" curl -fsSL https://deno.land/install.sh | sh"),console.log(),console.log(E.cyan(" # macOS (Homebrew)")),console.log(" brew install deno"),console.log(),console.log(E.cyan(" # Windows (PowerShell)")),console.log(" irm https://deno.land/install.ps1 | iex"),console.log(),console.log("Learn more: https://docs.deno.com/runtime/getting_started/installation/"),console.log(),console.log(E.gray("To skip validation (not recommended): nemar dataset upload --skip-validation")),process.exit(1);try{let ED=await Wy(B,{prune:!0});if(!ED.valid)J.fail("Dataset has validation errors"),console.log(),console.log(qy(ED)),console.log(),console.log(E.yellow("Fix the errors above before uploading.")),console.log(E.gray("Or use --skip-validation to upload anyway (not recommended).")),process.exit(1);J.succeed(`Dataset is valid BIDS (${ED.warningCount} warnings)`)}catch(ED){J.fail("Validation failed"),console.log(E.red(ED.message)),process.exit(1)}console.log()}J=C("Analyzing dataset 
files...").start();let Y=F.name||UGD(B),G=await Dy(B);J.succeed(`Found ${G.files.length} files (${G.dataFiles} data, ${G.metadataFiles} metadata)`);let H=k2(B);if(console.log(),H)console.log(E.bold.yellow("Resume Upload:")),console.log(` Dataset ID: ${E.cyan(H.dataset_id)}`),console.log(` Last attempt: ${H.last_upload_at||H.created_at}`);else console.log(E.bold("Upload Plan:"));if(console.log(` Name: ${Y}`),console.log(` Path: ${B}`),console.log(` Files: ${G.files.length}`),console.log(` Size: ${C$(G.totalSize)}`),console.log(` Data files: ${G.dataFiles} (will be uploaded to S3)`),console.log(` Metadata files: ${G.metadataFiles} (will be stored in git)`),console.log(` Parallel jobs: ${F.jobs}`),console.log(),F.dryRun){console.log(E.yellow("Dry run mode - no changes made"));return}let W=await mD("Proceed with upload?",{yes:F.yes,no:F.no},!0);if(W!=="confirmed"){console.log(W==="declined"?"Upload skipped.":"Upload cancelled.");return}console.log();let K=G.files.filter((h)=>h.type==="data"),V;if(H!==null){J=C(`Resuming upload for ${H.dataset_id}...`).start();try{await X8(H.dataset_id);let h=await Rv(H.dataset_id,K.map((qD)=>qD.path));V={dataset_id:H.dataset_id,ssh_url:H.ssh_url,s3_prefix:H.s3_prefix,github_url:H.github_url,upload_urls:h.upload_urls,s3_config:H.s3_config},J.succeed(`Resuming upload: ${V.dataset_id}`)}catch(h){if(J.fail("Failed to resume upload"),h instanceof g){if(console.log(E.red(` ${h.message}`)),h.statusCode===404)console.log(E.yellow(" The dataset may have been deleted. Try uploading as a new dataset.")),console.log(E.gray(` Remove ${B}/.nemar to start fresh.`))}else console.log(E.red(` ${h.message}`));process.exit(1)}}else{J=C("Creating dataset in NEMAR...").start();try{let h=await z$({name:Y,description:F.description,files:K.map((ED)=>({path:ED.path,size:ED.size,type:ED.type}))});V={dataset_id:h.dataset.dataset_id,ssh_url:h.dataset.ssh_url,s3_prefix:h.dataset.s3_prefix,github_url:h.dataset.github_url,upload_urls:h.upload_urls||{},s3_config:h.s3_config};let qD={dataset_id:V.dataset_id,github_url:V.github_url,ssh_url:V.ssh_url,s3_prefix:V.s3_prefix,s3_config:V.s3_config,created_at:new Date().toISOString()};v2(B,qD),J.succeed(`Dataset created: ${V.dataset_id}`),await new Promise((ED)=>setTimeout(ED,1e4))}catch(h){if(J.fail("Failed to create dataset"),h instanceof g)console.log(E.red(` ${h.message}`));else console.log(E.red(` ${h.message}`));process.exit(1)}}J=C("Accepting GitHub repository invitation...").start();let Z=V.github_url?.match(/github\.com\/([a-zA-Z0-9_.-]+\/[a-zA-Z0-9_.-]+)/),U=Z?Z[1].replace(/\.git$/,""):null;if(!U)J.fail("Invalid GitHub repository URL from backend"),console.log(E.red(` Received: ${V.github_url||"(empty)"}`)),console.log(E.red(" Expected format: https://github.com/owner/repo")),console.log(),console.log("This may indicate a backend issue. 
Please contact support."),process.exit(1);let L=await M$(U);if(L.accepted)if(L.alreadyCollaborator)J.succeed("Already a collaborator on this repository");else J.succeed("GitHub invitation accepted");else J.warn("Could not auto-accept invitation"),console.log(E.yellow(` ${L.error}`)),console.log(),console.log("You may need to accept the invitation manually:"),console.log(E.cyan(` https://github.com/${U}/invitations`)),console.log();J=C("Initializing git-annex dataset...").start();let M=$.username&&$.email?{name:$.username,email:$.email}:void 0;if(!await Y8(B)){let h=await Z$(B,{author:M});if(!h.success)J.fail("Failed to initialize git-annex dataset"),console.log(E.red(` ${h.error}`)),process.exit(1)}let P=await pv(B);if(!P.success)J.fail("Failed to initialize git-annex"),console.log(E.red(` ${P.error}`)),process.exit(1);let u=await U$(B);if(!u.success)J.warn("Could not configure largefiles pattern"),console.log(E.gray(` ${u.error}`));J.succeed("git-annex dataset initialized"),J=C("Configuring GitHub remote...").start();let T=await N$(B,V.ssh_url);if(!T.success)J.fail("Failed to configure GitHub remote"),console.log(E.red(` ${T.error}`)),process.exit(1);J.succeed("GitHub remote configured");let O=Object.keys(V.upload_urls).length;if(O>0){J=C(`Uploading ${O} data files to S3...`).start();let h=0,qD=O,ED=await R$(B,V.upload_urls,{jobs:Number.parseInt(F.jobs,10),onProgress:(b6)=>{if(b6.status==="completed")h++,J.text=`Uploading data files to S3... (${h}/${qD})`}});if(!ED.success){J.fail(`Failed to upload some files (${ED.failed.length} failed)`);for(let b6 of ED.failed.slice(0,5))console.log(E.red(` - ${b6}`));if(ED.failed.length>5)console.log(E.red(` ... and ${ED.failed.length-5} more`));process.exit(1)}J.succeed(`Uploaded ${ED.uploaded} data files to S3`),J=C("Registering file URLs with git-annex...").start();let{s3_config:h6,s3_prefix:$6}=V,V5={};for(let b6 of Object.keys(V.upload_urls))V5[b6]=`${h6.public_url}/${$6}/objects/${b6}`;let x$=await w$(B,V5);if(!x$.success)J.warn(`Some URLs could not be registered (${x$.failed.length} failed)`);else J.succeed(`Registered ${x$.registered} file URLs with git-annex`)}else console.log(E.gray("No data files to upload to S3"));J=C("Saving dataset changes...").start();let YD=await W5(B,"Initial NEMAR dataset upload",M);if(!YD.success)J.fail("Failed to save dataset"),console.log(E.red(` ${YD.error}`)),process.exit(1);J.succeed("Dataset changes saved"),J=C("Pushing metadata to GitHub...").start();let yD=await E5(B);if(!yD.success)J.fail("Failed to push to GitHub"),console.log(E.red(` ${yD.error}`)),process.exit(1);J.succeed("Metadata pushed to GitHub"),J=C("Setting up BIDS validation CI...").start();try{await K$(V.dataset_id),J.succeed("BIDS validation CI configured")}catch(h){if(h instanceof g&&h.statusCode===403)J.info("CI workflow will be configured by an admin");else{let qD=h instanceof Error?h.message:String(h);J.warn(`Could not configure CI: ${qD}`),console.log(E.gray(` An admin can add it later with: nemar admin ci add ${V.dataset_id}`))}}Ay(B),console.log(),console.log(E.green.bold("Upload complete!")),console.log(),console.log(` Dataset ID: ${E.cyan(V.dataset_id)}`),console.log(` GitHub: ${E.cyan(V.github_url)}`),console.log(),console.log(E.gray("To clone this dataset:")),console.log(E.gray(` git clone ${V.ssh_url}`))});gD.command("download").description("Download a dataset from NEMAR").argument("<dataset-id>","Dataset ID (e.g., nm000104)").option("-o, --output <path>","Output directory (default: ./<dataset-id>)").option("-j, --jobs 
<number>","Parallel download streams (default: 4)","4").option("--no-data","Download metadata only (skip large data files)").addHelpText("after",`
  Description:
  Download a BIDS dataset from NEMAR. Uses git-annex for efficient
  data transfer with parallel streams.
@@ -598,7 +598,7 @@ Examples:
  $ nemar sandbox # Run sandbox training
  $ nemar sandbox status # Check if training is completed
  $ nemar sandbox reset # Reset for re-training
- `).action(jGD);async function jGD(){if(console.log(),console.log(E.bold("NEMAR Sandbox Training")),console.log(E.gray("Verify your setup and learn the upload workflow")),console.log(),!PD()){console.log(E.red("Not authenticated")),console.log(E.gray("Run 'nemar auth login' first"));return}let D=eD();if(D.sandboxCompleted){console.log(E.green("Sandbox training already completed!")),console.log(E.gray(`Dataset ID: ${D.sandboxDatasetId}`)),console.log(),console.log("You can upload real datasets with:"),console.log(E.cyan(" nemar dataset upload ./your-dataset")),console.log(),console.log(E.gray("To re-run training, use: nemar sandbox reset"));return}console.log(E.bold("Step 1/6: Checking prerequisites..."));let F=C("Checking git-annex and SSH...").start(),$=await A$();if(!$.allPassed){F.fail("Prerequisites check failed"),console.log(),console.log(E.red("Missing requirements:"));for(let O of $.errors)console.log(E.yellow(` - ${O}`));if(!$.githubSSH.accessible)console.log(E.gray(" Run 'nemar auth setup-ssh' to configure SSH"));return}F.succeed("All prerequisites met");let B=C("Verifying GitHub CLI authentication...").start(),J=await L$(D.githubUsername);if(!J.authenticated){B.fail("GitHub CLI not authenticated"),console.log(E.red(` ${J.error}`)),console.log(),console.log("GitHub CLI is required for sandbox training. Install and authenticate:"),console.log(E.cyan(" brew install gh # or visit https://cli.github.com/")),console.log(E.cyan(" gh auth login"));return}if(D.githubUsername&&!J.matches)B.warn("GitHub CLI user mismatch"),console.log(E.yellow(` ${J.error}`)),console.log(),console.log("Your gh CLI is authenticated as a different GitHub account than your NEMAR account."),console.log("This may cause issues with repository access. To fix:"),console.log(E.cyan(` gh auth login # Login as ${D.githubUsername}`)),console.log(),console.log(E.yellow("WARNING: If upload fails with permission errors, this mismatch is the likely cause.")),console.log();else B.succeed(`GitHub CLI authenticated as ${J.username}`);console.log(),console.log(E.bold("Step 2/6: Generating test dataset..."));let Q=C("Creating minimal BIDS structure...").start(),X;try{let O=Ly();X=O.root;let YD=My(O);Q.succeed(`Test dataset created (${C$(YD)})`),console.log(E.gray(` Location: ${X}`))}catch(O){Q.fail("Failed to generate test dataset"),console.log(E.red(` ${O instanceof Error?O.message:"Unknown error"}`));return}console.log(),console.log(E.bold("Step 3/6: Registering sandbox dataset..."));let Y=C("Creating dataset on NEMAR...").start(),G,H,W,K,V,A;try{let O=await z$({name:"Sandbox Training Dataset",description:"Placeholder dataset for sandbox training",files:[{path:"sub-01/eeg/sub-01_task-rest_eeg.edf",size:512000,type:"data"},{path:"dataset_description.json",size:200,type:"metadata"},{path:"participants.tsv",size:50,type:"metadata"},{path:"README",size:500,type:"metadata"},{path:"sub-01/eeg/sub-01_task-rest_eeg.json",size:300,type:"metadata"}],sandbox:!0});G=O.dataset.dataset_id,H=O.dataset.ssh_url,W=O.dataset.github_url,K=O.s3_config,V=O.dataset.s3_prefix,A=O.upload_urls||{},Y.succeed(`Sandbox dataset created: ${E.cyan(G)}`),console.log(E.gray(` GitHub: ${W}`)),await new Promise((YD)=>setTimeout(YD,1e4))}catch(O){if(Y.fail("Failed to create sandbox dataset"),O instanceof g)console.log(E.red(` ${O.message}`));else console.log(E.red(` ${O instanceof Error?O.message:"Unknown error"}`));A1(X);return}let Z=C("Accepting GitHub repository 
invitation...").start(),U=W?.match(/github\.com\/([a-zA-Z0-9_.-]+\/[a-zA-Z0-9_.-]+)/),L=U?U[1].replace(/\.git$/,""):null;if(!L){Z.fail("Invalid GitHub repository URL from backend"),console.log(E.red(` Received: ${W||"(empty)"}`)),console.log(E.red(" Expected format: https://github.com/owner/repo")),console.log(),console.log("This may indicate a backend issue. Please contact support."),A1(X);return}let M=await M$(L);if(M.accepted)if(M.alreadyCollaborator)Z.succeed("Already a collaborator on this repository");else Z.succeed("GitHub invitation accepted");else Z.warn("Could not auto-accept invitation"),console.log(E.yellow(` ${M.error}`)),console.log(),console.log("You may need to accept the invitation manually:"),console.log(E.cyan(` https://github.com/${L}/invitations`)),console.log();console.log(),console.log(E.bold("Step 4/6: Initializing repository..."));let w=C("Setting up git-annex...").start(),P=D.username&&D.email?{name:D.username,email:D.email}:void 0;try{await Z$(X,{author:P}),await U$(X),await N$(X,H),w.succeed("Repository initialized")}catch(O){w.fail("Failed to initialize repository"),console.log(E.red(` ${O instanceof Error?O.message:"Unknown error"}`)),A1(X);return}if(console.log(),console.log(E.bold("Step 5/6: Uploading to S3...")),Object.keys(A).length===0)console.log(E.yellow(" No data files to upload (metadata only)"));else{let O=C("Uploading test data...").start();try{let yD=0,h=Object.keys(A).length,qD=await R$(X,A,{jobs:4,onProgress:(ED)=>{if(ED.status==="completed"||ED.status==="failed")yD++,O.text=`Uploading... ${yD}/${h} files`}});if(qD.failed.length>0){O.fail(`Upload failed for ${qD.failed.length} file(s)`);for(let ED of qD.failed)console.log(E.red(` Failed: ${ED}`));if(qD.error)console.log(E.red(` Error: ${qD.error}`));console.log(),console.log(E.yellow("Sandbox training aborted due to upload failures.")),console.log(E.gray("Please check your network connection and try again.")),A1(X);return}O.succeed(`Uploaded ${qD.uploaded} file(s)`)}catch(yD){O.fail("Upload failed"),console.log(E.red(` ${yD instanceof Error?yD.message:"Unknown error"}`)),A1(X);return}let YD=C("Registering file URLs...").start();try{let yD={};for(let qD of Object.keys(A))yD[qD]=`${K.public_url}/${V}/${qD}`;let h=await w$(X,yD);if(!h.success){YD.fail(`URL registration failed for ${h.failed.length} file(s)`);for(let qD of h.failed)console.log(E.red(` Failed: ${qD}`));console.log(),console.log(E.yellow("Sandbox training aborted due to URL registration failures.")),console.log(E.gray("This may indicate a git-annex configuration issue.")),A1(X);return}YD.succeed(`Registered ${h.registered} file URLs`)}catch(yD){YD.fail("Failed to register URLs"),console.log(E.red(` ${yD instanceof Error?yD.message:"Unknown error"}`)),A1(X);return}}console.log(),console.log(E.bold("Step 6/6: Pushing to GitHub..."));let u=C("Saving and pushing...").start();try{await W5(X,"Initial sandbox training upload",P),await E5(X),u.succeed("Pushed to GitHub")}catch(O){u.fail("Failed to push to GitHub"),console.log(E.red(` ${O instanceof Error?O.message:"Unknown error"}`)),A1(X);return}let T=C("Finalizing...").start();try{await V$(G),await Iv(G),K0("sandboxCompleted",!0),K0("sandboxDatasetId",G),T.succeed("Sandbox training complete!")}catch(O){T.fail("Failed to finalize"),console.log(E.red(` ${O instanceof Error?O.message:"Unknown error"}`)),A1(X);return}A1(X),console.log(),console.log(E.green.bold("Congratulations! 
Sandbox training completed successfully.")),console.log(),console.log("Your setup is verified and you're ready to upload real datasets:"),console.log(E.cyan(" nemar dataset upload ./your-dataset")),console.log(),console.log(E.gray(`Sandbox dataset: ${G}`))}y$.command("status").description("Check sandbox training completion status").option("--refresh","Fetch latest status from server").action(async(D)=>{if(!PD()){console.log(E.red("Not authenticated")),console.log(E.gray("Run 'nemar auth login' first"));return}if(D.refresh){let F=C("Checking status...").start();try{let $=await Sv();if(K0("sandboxCompleted",$.sandbox_completed),$.sandbox_dataset_id)K0("sandboxDatasetId",$.sandbox_dataset_id);if(F.stop(),$.sandbox_completed){if(console.log(E.green("Sandbox training: Completed")),console.log(E.gray(` Dataset ID: ${$.sandbox_dataset_id}`)),$.sandbox_completed_at)console.log(E.gray(` Completed: ${$.sandbox_completed_at}`))}else console.log(E.yellow("Sandbox training: Not completed")),console.log(),console.log("Run sandbox training with:"),console.log(E.cyan(" nemar sandbox"))}catch($){if(F.fail("Failed to check status"),$ instanceof g)console.log(E.red(` ${$.message}`))}}else{let F=eD();if(F.sandboxCompleted)console.log(E.green("Sandbox training: Completed")),console.log(E.gray(` Dataset ID: ${F.sandboxDatasetId}`));else console.log(E.yellow("Sandbox training: Not completed")),console.log(),console.log("Run sandbox training with:"),console.log(E.cyan(" nemar sandbox"))}});y$.command("reset").description("Reset sandbox training status for re-training").option(xD,_D).option(lD,dD).action(async(D)=>{if(!PD()){console.log(E.red("Not authenticated")),console.log(E.gray("Run 'nemar auth login' first"));return}if(!eD().sandboxCompleted){console.log(E.yellow("Sandbox training not yet completed")),console.log(E.gray("Nothing to reset"));return}let $=await mD("Reset sandbox training status? 
You will need to complete training again.",D);if($!=="confirmed"){console.log(E.gray($==="declined"?"Skipped":"Cancelled"));return}let B=C("Resetting sandbox status...").start();try{await Pv(),V2("sandboxCompleted"),V2("sandboxDatasetId"),B.succeed("Sandbox status reset"),console.log(),console.log("Run sandbox training again with:"),console.log(E.cyan(" nemar sandbox"))}catch(J){if(B.fail("Failed to reset"),J instanceof g)console.log(E.red(` ${J.message}`));else console.log(E.red(` ${J instanceof Error?J.message:"Unknown error"}`))}});var Ny={name:"nemar-cli",version:"0.3.11-dev.170",description:"CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource) dataset management",type:"module",main:"dist/index.js",bin:{nemar:"dist/index.js"},scripts:{dev:"bun run src/index.ts",build:"bun build src/index.ts --outdir dist --target bun --minify && sed '1s|#!/usr/bin/env node|#!/usr/bin/env bun|' dist/index.js > dist/index.js.tmp && mv dist/index.js.tmp dist/index.js",test:"bun test",lint:"biome check src/","lint:fix":"biome check --fix src/",format:"biome format --write src/",typecheck:"tsc --noEmit",prepublishOnly:"bun run build","docs:generate":"bun run scripts/generate-docs.ts","docs:serve":"mkdocs serve","docs:build":"mkdocs build"},keywords:["nemar","bids","neuroimaging","eeg","emg","datalad","cli"],author:"NEMAR Team",license:"MIT",repository:{type:"git",url:"git+https://github.com/nemarDatasets/nemar-cli.git"},bugs:{url:"https://github.com/nemarDatasets/nemar-cli/issues"},homepage:"https://nemar-cli.pages.dev",engines:{bun:">=1.0.0"},files:["dist","README.md","LICENSE"],dependencies:{chalk:"^5.3.0",commander:"^12.1.0",conf:"^13.0.1",inquirer:"^9.2.15",ora:"^8.0.1",zod:"^3.23.8"},devDependencies:{"@biomejs/biome":"1.9.4","@types/bcryptjs":"^3.0.0","@types/bun":"latest","@types/inquirer":"^9.0.7",bcryptjs:"^3.0.3",typescript:"^5.5.4"}};var Cy=Ny.version;var Z1=new D0;Z1.name("nemar").description(`CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource)
+ `).action(jGD);async function jGD(){if(console.log(),console.log(E.bold("NEMAR Sandbox Training")),console.log(E.gray("Verify your setup and learn the upload workflow")),console.log(),!PD()){console.log(E.red("Not authenticated")),console.log(E.gray("Run 'nemar auth login' first"));return}let D=eD();if(D.sandboxCompleted){console.log(E.green("Sandbox training already completed!")),console.log(E.gray(`Dataset ID: ${D.sandboxDatasetId}`)),console.log(),console.log("You can upload real datasets with:"),console.log(E.cyan(" nemar dataset upload ./your-dataset")),console.log(),console.log(E.gray("To re-run training, use: nemar sandbox reset"));return}console.log(E.bold("Step 1/6: Checking prerequisites..."));let F=C("Checking git-annex and SSH...").start(),$=await A$();if(!$.allPassed){F.fail("Prerequisites check failed"),console.log(),console.log(E.red("Missing requirements:"));for(let O of $.errors)console.log(E.yellow(` - ${O}`));if(!$.githubSSH.accessible)console.log(E.gray(" Run 'nemar auth setup-ssh' to configure SSH"));return}F.succeed("All prerequisites met");let B=C("Verifying GitHub CLI authentication...").start(),J=await L$(D.githubUsername);if(!J.authenticated){B.fail("GitHub CLI not authenticated"),console.log(E.red(` ${J.error}`)),console.log(),console.log("GitHub CLI is required for sandbox training. Install and authenticate:"),console.log(E.cyan(" brew install gh # or visit https://cli.github.com/")),console.log(E.cyan(" gh auth login"));return}if(D.githubUsername&&!J.matches)B.warn("GitHub CLI user mismatch"),console.log(E.yellow(` ${J.error}`)),console.log(),console.log("Your gh CLI is authenticated as a different GitHub account than your NEMAR account."),console.log("This may cause issues with repository access. To fix:"),console.log(E.cyan(` gh auth login # Login as ${D.githubUsername}`)),console.log(),console.log(E.yellow("WARNING: If upload fails with permission errors, this mismatch is the likely cause.")),console.log();else B.succeed(`GitHub CLI authenticated as ${J.username}`);console.log(),console.log(E.bold("Step 2/6: Generating test dataset..."));let Q=C("Creating minimal BIDS structure...").start(),X;try{let O=Ly();X=O.root;let YD=My(O);Q.succeed(`Test dataset created (${C$(YD)})`),console.log(E.gray(` Location: ${X}`))}catch(O){Q.fail("Failed to generate test dataset"),console.log(E.red(` ${O instanceof Error?O.message:"Unknown error"}`));return}console.log(),console.log(E.bold("Step 3/6: Registering sandbox dataset..."));let Y=C("Creating dataset on NEMAR...").start(),G,H,W,K,V,A;try{let O=await z$({name:"Sandbox Training Dataset",description:"Placeholder dataset for sandbox training",files:[{path:"sub-01/eeg/sub-01_task-rest_eeg.edf",size:512000,type:"data"},{path:"dataset_description.json",size:200,type:"metadata"},{path:"participants.tsv",size:50,type:"metadata"},{path:"README",size:500,type:"metadata"},{path:"sub-01/eeg/sub-01_task-rest_eeg.json",size:300,type:"metadata"}],sandbox:!0});G=O.dataset.dataset_id,H=O.dataset.ssh_url,W=O.dataset.github_url,K=O.s3_config,V=O.dataset.s3_prefix,A=O.upload_urls||{},Y.succeed(`Sandbox dataset created: ${E.cyan(G)}`),console.log(E.gray(` GitHub: ${W}`)),await new Promise((YD)=>setTimeout(YD,1e4))}catch(O){if(Y.fail("Failed to create sandbox dataset"),O instanceof g)console.log(E.red(` ${O.message}`));else console.log(E.red(` ${O instanceof Error?O.message:"Unknown error"}`));A1(X);return}let Z=C("Accepting GitHub repository 
invitation...").start(),U=W?.match(/github\.com\/([a-zA-Z0-9_.-]+\/[a-zA-Z0-9_.-]+)/),L=U?U[1].replace(/\.git$/,""):null;if(!L){Z.fail("Invalid GitHub repository URL from backend"),console.log(E.red(` Received: ${W||"(empty)"}`)),console.log(E.red(" Expected format: https://github.com/owner/repo")),console.log(),console.log("This may indicate a backend issue. Please contact support."),A1(X);return}let M=await M$(L);if(M.accepted)if(M.alreadyCollaborator)Z.succeed("Already a collaborator on this repository");else Z.succeed("GitHub invitation accepted");else Z.warn("Could not auto-accept invitation"),console.log(E.yellow(` ${M.error}`)),console.log(),console.log("You may need to accept the invitation manually:"),console.log(E.cyan(` https://github.com/${L}/invitations`)),console.log();console.log(),console.log(E.bold("Step 4/6: Initializing repository..."));let w=C("Setting up git-annex...").start(),P=D.username&&D.email?{name:D.username,email:D.email}:void 0;try{await Z$(X,{author:P}),await U$(X),await N$(X,H),w.succeed("Repository initialized")}catch(O){w.fail("Failed to initialize repository"),console.log(E.red(` ${O instanceof Error?O.message:"Unknown error"}`)),A1(X);return}if(console.log(),console.log(E.bold("Step 5/6: Uploading to S3...")),Object.keys(A).length===0)console.log(E.yellow(" No data files to upload (metadata only)"));else{let O=C("Uploading test data...").start();try{let yD=0,h=Object.keys(A).length,qD=await R$(X,A,{jobs:4,onProgress:(ED)=>{if(ED.status==="completed"||ED.status==="failed")yD++,O.text=`Uploading... ${yD}/${h} files`}});if(qD.failed.length>0){O.fail(`Upload failed for ${qD.failed.length} file(s)`);for(let ED of qD.failed)console.log(E.red(` Failed: ${ED}`));if(qD.error)console.log(E.red(` Error: ${qD.error}`));console.log(),console.log(E.yellow("Sandbox training aborted due to upload failures.")),console.log(E.gray("Please check your network connection and try again.")),A1(X);return}O.succeed(`Uploaded ${qD.uploaded} file(s)`)}catch(yD){O.fail("Upload failed"),console.log(E.red(` ${yD instanceof Error?yD.message:"Unknown error"}`)),A1(X);return}let YD=C("Registering file URLs...").start();try{let yD={};for(let qD of Object.keys(A))yD[qD]=`${K.public_url}/${V}/objects/${qD}`;let h=await w$(X,yD);if(!h.success){YD.fail(`URL registration failed for ${h.failed.length} file(s)`);for(let qD of h.failed)console.log(E.red(` Failed: ${qD}`));console.log(),console.log(E.yellow("Sandbox training aborted due to URL registration failures.")),console.log(E.gray("This may indicate a git-annex configuration issue.")),A1(X);return}YD.succeed(`Registered ${h.registered} file URLs`)}catch(yD){YD.fail("Failed to register URLs"),console.log(E.red(` ${yD instanceof Error?yD.message:"Unknown error"}`)),A1(X);return}}console.log(),console.log(E.bold("Step 6/6: Pushing to GitHub..."));let u=C("Saving and pushing...").start();try{await W5(X,"Initial sandbox training upload",P),await E5(X),u.succeed("Pushed to GitHub")}catch(O){u.fail("Failed to push to GitHub"),console.log(E.red(` ${O instanceof Error?O.message:"Unknown error"}`)),A1(X);return}let T=C("Finalizing...").start();try{await V$(G),await Iv(G),K0("sandboxCompleted",!0),K0("sandboxDatasetId",G),T.succeed("Sandbox training complete!")}catch(O){T.fail("Failed to finalize"),console.log(E.red(` ${O instanceof Error?O.message:"Unknown error"}`)),A1(X);return}A1(X),console.log(),console.log(E.green.bold("Congratulations! 
Sandbox training completed successfully.")),console.log(),console.log("Your setup is verified and you're ready to upload real datasets:"),console.log(E.cyan(" nemar dataset upload ./your-dataset")),console.log(),console.log(E.gray(`Sandbox dataset: ${G}`))}y$.command("status").description("Check sandbox training completion status").option("--refresh","Fetch latest status from server").action(async(D)=>{if(!PD()){console.log(E.red("Not authenticated")),console.log(E.gray("Run 'nemar auth login' first"));return}if(D.refresh){let F=C("Checking status...").start();try{let $=await Sv();if(K0("sandboxCompleted",$.sandbox_completed),$.sandbox_dataset_id)K0("sandboxDatasetId",$.sandbox_dataset_id);if(F.stop(),$.sandbox_completed){if(console.log(E.green("Sandbox training: Completed")),console.log(E.gray(` Dataset ID: ${$.sandbox_dataset_id}`)),$.sandbox_completed_at)console.log(E.gray(` Completed: ${$.sandbox_completed_at}`))}else console.log(E.yellow("Sandbox training: Not completed")),console.log(),console.log("Run sandbox training with:"),console.log(E.cyan(" nemar sandbox"))}catch($){if(F.fail("Failed to check status"),$ instanceof g)console.log(E.red(` ${$.message}`))}}else{let F=eD();if(F.sandboxCompleted)console.log(E.green("Sandbox training: Completed")),console.log(E.gray(` Dataset ID: ${F.sandboxDatasetId}`));else console.log(E.yellow("Sandbox training: Not completed")),console.log(),console.log("Run sandbox training with:"),console.log(E.cyan(" nemar sandbox"))}});y$.command("reset").description("Reset sandbox training status for re-training").option(xD,_D).option(lD,dD).action(async(D)=>{if(!PD()){console.log(E.red("Not authenticated")),console.log(E.gray("Run 'nemar auth login' first"));return}if(!eD().sandboxCompleted){console.log(E.yellow("Sandbox training not yet completed")),console.log(E.gray("Nothing to reset"));return}let $=await mD("Reset sandbox training status? 
You will need to complete training again.",D);if($!=="confirmed"){console.log(E.gray($==="declined"?"Skipped":"Cancelled"));return}let B=C("Resetting sandbox status...").start();try{await Pv(),V2("sandboxCompleted"),V2("sandboxDatasetId"),B.succeed("Sandbox status reset"),console.log(),console.log("Run sandbox training again with:"),console.log(E.cyan(" nemar sandbox"))}catch(J){if(B.fail("Failed to reset"),J instanceof g)console.log(E.red(` ${J.message}`));else console.log(E.red(` ${J instanceof Error?J.message:"Unknown error"}`))}});var Ny={name:"nemar-cli",version:"0.3.11-dev.172",description:"CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource) dataset management",type:"module",main:"dist/index.js",bin:{nemar:"dist/index.js"},scripts:{dev:"bun run src/index.ts",build:"bun build src/index.ts --outdir dist --target bun --minify && sed '1s|#!/usr/bin/env node|#!/usr/bin/env bun|' dist/index.js > dist/index.js.tmp && mv dist/index.js.tmp dist/index.js",test:"bun test",lint:"biome check src/","lint:fix":"biome check --fix src/",format:"biome format --write src/",typecheck:"tsc --noEmit",prepublishOnly:"bun run build","docs:generate":"bun run scripts/generate-docs.ts","docs:serve":"mkdocs serve","docs:build":"mkdocs build"},keywords:["nemar","bids","neuroimaging","eeg","emg","datalad","cli"],author:"NEMAR Team",license:"MIT",repository:{type:"git",url:"git+https://github.com/nemarDatasets/nemar-cli.git"},bugs:{url:"https://github.com/nemarDatasets/nemar-cli/issues"},homepage:"https://nemar-cli.pages.dev",engines:{bun:">=1.0.0"},files:["dist","README.md","LICENSE"],dependencies:{chalk:"^5.3.0",commander:"^12.1.0",conf:"^13.0.1",inquirer:"^9.2.15",ora:"^8.0.1",zod:"^3.23.8"},devDependencies:{"@biomejs/biome":"1.9.4","@types/bcryptjs":"^3.0.0","@types/bun":"latest","@types/inquirer":"^9.0.7",bcryptjs:"^3.0.3",typescript:"^5.5.4"}};var Cy=Ny.version;var Z1=new D0;Z1.name("nemar").description(`CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource)
 
  NEMAR is a curated repository for neurophysiology data in BIDS format.
  This CLI provides tools for uploading, downloading, and managing datasets.`).version(Cy,"-v, --version","Output the current version").option("--no-color","Disable colored output").option("--verbose","Enable verbose output").addHelpText("after",`
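Note on the index.js changes above: both hunks modify how public file URLs are built before being registered with git-annex. In 0.3.11-dev.170 the URL was "<public_url>/<s3_prefix>/<file path>"; in 0.3.11-dev.172 an "objects/" segment is inserted between the S3 prefix and the file path, in both the "dataset upload" flow and the sandbox training flow. The first hunk also simplifies an equivalent destructuring (s3_config and s3_prefix are now read directly from the dataset-info object instead of being rebuilt field by field), and the bundled package metadata moves from 0.3.11-dev.170 to 0.3.11-dev.172. A minimal TypeScript sketch of the new URL construction follows; buildPublicUrls, S3Config, and the sample values are illustrative names only, not exports of nemar-cli:

  // Sketch only, assuming the field names visible in the minified bundle
  // (public_url, s3_prefix, upload_urls). buildPublicUrls and S3Config are
  // hypothetical names for illustration, not part of the published package.
  interface S3Config {
    public_url: string; // e.g. "https://nemar-data.example.com" (placeholder host)
  }

  function buildPublicUrls(
    s3Config: S3Config,
    s3Prefix: string,
    uploadUrls: Record<string, string>,
  ): Record<string, string> {
    const urls: Record<string, string> = {};
    for (const filePath of Object.keys(uploadUrls)) {
      // 0.3.11-dev.170: `${s3Config.public_url}/${s3Prefix}/${filePath}`
      // 0.3.11-dev.172: adds an "objects/" segment before the file path
      urls[filePath] = `${s3Config.public_url}/${s3Prefix}/objects/${filePath}`;
    }
    return urls;
  }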
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "nemar-cli",
- "version": "0.3.11-dev.170",
+ "version": "0.3.11-dev.172",
  "description": "CLI for NEMAR (Neuroelectromagnetic Data Archive and Tools Resource) dataset management",
  "type": "module",
  "main": "dist/index.js",