pipen-cli-gbatch 0.0.2__tar.gz → 0.0.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of pipen-cli-gbatch might be problematic.

@@ -1,6 +1,6 @@
- Metadata-Version: 2.3
+ Metadata-Version: 2.4
  Name: pipen-cli-gbatch
- Version: 0.0.2
+ Version: 0.0.4
  Summary: A pipen cli plugin to run command via Google Cloud Batch
  License: MIT
  Author: pwwang
@@ -13,9 +13,12 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
+ Classifier: Programming Language :: Python :: 3.14
  Requires-Dist: google-cloud-storage (>=3.0.0,<4.0.0)
  Requires-Dist: pipen (>=0.17.19,<0.18.0)
  Requires-Dist: pipen-poplog (>=0.3.6,<0.4.0)
+ Project-URL: Homepage, https://github.com/pwwang/pipen-cli-gbatch
+ Project-URL: Repository, https://github.com/pwwang/pipen-cli-gbatch
  Description-Content-Type: text/markdown

  # pipen-cli-gbatch
@@ -79,7 +79,7 @@ from pipen.cli import CLIPlugin
  from pipen.scheduler import GbatchScheduler
  from pipen_poplog import LogsPopulator

- __version__ = "0.0.2"
+ __version__ = "0.0.4"
  __all__ = ("CliGbatchPlugin", "CliGbatchDaemon")


@@ -117,6 +117,11 @@ class CliGbatchDaemon:

  self.config.prescript = self.config.get("prescript", None) or ""
  self.config.postscript = self.config.get("postscript", None) or ""
+ if "labels" in self.config and isinstance(self.config.labels, list):
+ self.config.labels = {
+ key: val
+ for key, val in (item.split("=", 1) for item in self.config.labels)
+ }
  self.command = command

  def _get_arg_from_command(self, arg: str) -> str | None:
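The hunk above converts `--labels` values given as repeated `key=value` strings into a dict before they reach the scheduler. A minimal standalone sketch of that conversion (the label values here are hypothetical, not taken from the package):

```python
# Sketch of the new label handling: a list of "key=value" strings is collapsed
# into a dict; split("=", 1) keeps any "=" inside the value intact.
labels = ["env=prod", "team=bio", "note=a=b"]  # hypothetical --labels inputs
parsed = {key: val for key, val in (item.split("=", 1) for item in labels)}
print(parsed)  # {'env': 'prod', 'team': 'bio', 'note': 'a=b'}
```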
@@ -183,7 +188,7 @@ class CliGbatchDaemon:
  """
  mount = self.config.get("mount", [])
  # mount the workdir
- mount.append(f'{source}:{target}')
+ mount.append(f"{source}:{target}")

  self.config["mount"] = mount

@@ -197,7 +202,7 @@ class CliGbatchDaemon:
  SystemExit: If workdir is not a valid Google Storage bucket path.
  """
  command_workdir = self._get_arg_from_command("workdir")
- workdir = self.config.get("workdir", command_workdir)
+ workdir = self.config.get("workdir", None) or command_workdir

  if not workdir or not isinstance(AnyPath(workdir), GSPath):
  print(
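The change above matters because `dict.get(key, default)` only falls back when the key is missing, not when it is present with a falsy value. A small illustration with made-up values:

```python
# Why the `or` fallback was needed: .get() returns the stored None/"" instead of
# the default when the key exists, so an empty workdir never fell through to the
# value recovered from the command line.
config = {"workdir": None}                   # hypothetical parsed config
command_workdir = "gs://my-bucket/workdir"   # hypothetical value found after `--`

print(config.get("workdir", command_workdir))           # None
print(config.get("workdir", None) or command_workdir)   # gs://my-bucket/workdir
```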
@@ -312,7 +317,7 @@ class CliGbatchDaemon:

  def _show_scheduler_opts(self):
  """Log the scheduler options for debugging purposes."""
- logger.debug("Scheduler Options:")
+ logger.info("Scheduler Options:")
  for key, val in self.config.items():
  if key in (
  "workdir",
@@ -332,7 +337,7 @@ class CliGbatchDaemon:
  ):
  continue

- logger.debug(f"- {key}: {val}")
+ logger.info(f"- {key}: {val}")

  async def _run_wait(self): # pragma: no cover
  """Run the pipeline and wait for completion.
@@ -493,7 +498,7 @@ class CliGbatchDaemon:
  )
  sys.exit(1)

- if 'name' not in self.config:
+ if "name" not in self.config or not self.config.name:
  self.config["name"] = "PipenCliGbatchDaemon"

  async def run(self): # pragma: no cover
@@ -553,6 +558,17 @@ class XquteCliGbatchPlugin: # pragma: no cover
  logger.error(f"/STDERR {self.stderr_populator.residue}")
  self.stderr_populator.residue = ""

+ @plugin.impl
+ async def on_job_init(self, scheduler, job):
+ """Handle job initialization event.
+
+ Args:
+ scheduler: The scheduler instance.
+ job: The job being initialized.
+ """
+ self.stdout_populator.logfile = scheduler.workdir.joinpath("0", "job.stdout")
+ self.stderr_populator.logfile = scheduler.workdir.joinpath("0", "job.stderr")
+
  @plugin.impl
  async def on_job_started(self, scheduler, job):
  """Handle job start event by setting up log file paths.
@@ -564,8 +580,6 @@ class XquteCliGbatchPlugin: # pragma: no cover
  if not self.log_start:
  return

- self.stdout_populator.logfile = scheduler.workdir.joinpath("0", "job.stdout")
- self.stderr_populator.logfile = scheduler.workdir.joinpath("0", "job.stderr")
  logger.info("Job is picked up by Google Batch, pulling stdout/stderr...")

  @plugin.impl
@@ -693,29 +707,29 @@ class CliGbatchPlugin(CLIPlugin): # pragma: no cover
  super().__init__(parser, subparser)
  subparser.epilog = """\033[1;4mExamples\033[0m:

- \u200B
+ \u200b
  # Run a command and wait for it to complete
  > pipen gbatch --workdir gs://my-bucket/workdir -- \\
  python myscript.py --input input.txt --output output.txt

- \u200B
+ \u200b
  # Use named mounts
  > pipen gbatch --workdir gs://my-bucket/workdir --mount INFILE=gs://bucket/path/to/file \\
  --mount OUTDIR=gs://bucket/path/to/outdir -- \\
  bash -c 'cat $INFILE > $OUTDIR/output.txt'

- \u200B
+ \u200b
  # Run a command in a detached mode
  > pipen gbatch --nowait --project $PROJECT --location $LOCATION \\
  --workdir gs://my-bucket/workdir -- \\
  python myscript.py --input input.txt --output output.txt

- \u200B
+ \u200b
  # If you have a profile defined in ~/.pipen.toml or ./.pipen.toml
  > pipen gbatch --profile myprofile -- \\
  python myscript.py --input input.txt --output output.txt

- \u200B
+ \u200b
  # View the logs of a previously run command
  > pipen gbatch --view-logs all --name my-daemon-name \\
  --workdir gs://my-bucket/workdir
@@ -753,12 +767,36 @@ class CliGbatchPlugin(CLIPlugin): # pragma: no cover
  CONFIG_FILES,
  known_parsed.profile,
  )
+
+ def is_valid(val: Any) -> bool:
+ """Check if a value is valid (not None, not empty string, not empty list).
+ """
+ if val is None:
+ return False
+ if isinstance(val, bool):
+ return True
+ return bool(val)
+
  # update parsed with the defaults
  for key, val in defaults.items():
+ if (
+ key == "mount"
+ and val
+ and getattr(known_parsed, key, None)
+ ):
+ if not isinstance(val, (tuple, list)):
+ val = [val]
+ val = list(val)
+
+ kp_mount = getattr(known_parsed, key)
+ val.extend(kp_mount)
+ setattr(known_parsed, key, val)
+ continue
+
  if (
  key == "command"
  or val is None
- or getattr(known_parsed, key, None) is not None
+ or is_valid(getattr(known_parsed, key, None))
  ):
  continue

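A rough, self-contained sketch of the new default-merging behavior shown in the hunk above. The names `defaults`, `known_parsed` and `is_valid` mirror the diff, but the values are made up: profile-defined `mount` entries are now concatenated with CLI-provided ones instead of being discarded, and other defaults apply whenever the parsed value is None or empty, with booleans always kept.

```python
from types import SimpleNamespace
from typing import Any

def is_valid(val: Any) -> bool:
    """Treat None, "" and [] as unset; keep booleans (False is a real value)."""
    if val is None:
        return False
    if isinstance(val, bool):
        return True
    return bool(val)

# Hypothetical profile defaults and CLI-parsed values.
defaults = {"mount": ["gs://bucket/ref:/mnt/ref"], "project": "my-proj", "nowait": False}
known_parsed = SimpleNamespace(mount=["gs://bucket/data:/mnt/data"], project="", nowait=False)

for key, val in defaults.items():
    if key == "mount" and val and getattr(known_parsed, key, None):
        # Mounts are merged: defaults first, then the CLI-provided ones.
        merged = list(val if isinstance(val, (tuple, list)) else [val])
        merged.extend(getattr(known_parsed, key))
        setattr(known_parsed, key, merged)
        continue
    if val is None or is_valid(getattr(known_parsed, key, None)):
        continue
    setattr(known_parsed, key, val)

print(known_parsed.mount)    # ['gs://bucket/ref:/mnt/ref', 'gs://bucket/data:/mnt/data']
print(known_parsed.project)  # 'my-proj'  (empty string treated as unset)
print(known_parsed.nowait)   # False      (boolean kept as-is)
```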
@@ -120,7 +120,7 @@ help = "The location to run the job."
  flags = ["--mount"]
  # type = "list"
  default = []
- action = "clear_append"
+ action = "append"
  help = """The list of mounts to mount to the VM, each in the format of SOURCE:TARGET, where SOURCE must be either a Google Storage Bucket path (gs://...).
  You can also use named mounts like `INDIR=gs://my-bucket/inputs` and the directory will be mounted to `/mnt/disks/INDIR` in the VM;
  then you can use environment variable `$INDIR` in the command/script to refer to the mounted path.
@@ -186,20 +186,21 @@ and positive values mean to run after the main command."""
  [[groups.arguments]]
  flags = ["--allocationPolicy"]
  type = "json"
- default = "{}"
+ default = {}
  help = "The JSON string of extra settings of allocationPolicy add to the job.json. Refer to https://cloud.google.com/batch/docs/reference/rest/v1/projects.locations.jobs#AllocationPolicy for details."

  [[groups.arguments]]
  flags = ["--taskGroups"]
  type = "json"
- default = "[]"
+ default = []
  help = "The JSON string of extra settings of taskGroups add to the job.json. Refer to https://cloud.google.com/batch/docs/reference/rest/v1/projects.locations.jobs#TaskGroup for details."

  [[groups.arguments]]
  flags = ["--labels"]
- type = "json"
- default = "{}"
- help = "The JSON string of labels to add to the job. Refer to https://cloud.google.com/batch/docs/reference/rest/v1/projects.locations.jobs#Job.FIELDS.labels for details."
+ # type = "json"
+ default = []
+ action = "clear_append"
+ help = "The strings of labels to add to the job (key=value). Refer to https://cloud.google.com/batch/docs/reference/rest/v1/projects.locations.jobs#Job.FIELDS.labels for details."

  [[groups.arguments]]
  flags = ["--gcloud"]
@@ -1,18 +1,21 @@
- [project]
+ [tool.poetry]
  name = "pipen-cli-gbatch"
- version = "0.0.2"
+ version = "0.0.4"
  description = "A pipen cli plugin to run command via Google Cloud Batch"
- authors = [
- {name = "pwwang",email = "pwwang@pwwang.com"}
- ]
- license = {text = "MIT"}
+ authors = ["pwwang <pwwang@pwwang.com>"]
+ license = "MIT"
  readme = "README.md"
- requires-python = ">=3.9,<4.0"
- dependencies = [
- "pipen (>=0.17.19,<0.18.0)",
- "pipen-poplog (>=0.3.6,<0.4.0)",
- "google-cloud-storage (>=3.0.0,<4.0.0)"
- ]
+ homepage = "https://github.com/pwwang/pipen-cli-gbatch"
+ repository = "https://github.com/pwwang/pipen-cli-gbatch"
+
+ [tool.poetry.build]
+ generate-setup-file = true
+
+ [tool.poetry.dependencies]
+ python = "^3.9"
+ pipen = "^0.17.19"
+ pipen-poplog = "^0.3.6"
+ google-cloud-storage = "^3.0.0"

  [tool.pytest.ini_options]
  addopts = "-vv -n auto --dist loadgroup -p no:benchmark -W error::UserWarning --cov-config=.coveragerc --cov=pipen_cli_gbatch --cov-report xml:.coverage.xml --cov-report term-missing"
@@ -0,0 +1,36 @@
+ # -*- coding: utf-8 -*-
+ from setuptools import setup
+
+ packages = \
+ ['pipen_cli_gbatch']
+
+ package_data = \
+ {'': ['*']}
+
+ install_requires = \
+ ['google-cloud-storage>=3.0.0,<4.0.0',
+ 'pipen-poplog>=0.3.6,<0.4.0',
+ 'pipen>=0.17.19,<0.18.0']
+
+ entry_points = \
+ {'pipen_cli': ['cli-gbatch = pipen_cli_gbatch:CliGbatchPlugin']}
+
+ setup_kwargs = {
+ 'name': 'pipen-cli-gbatch',
+ 'version': '0.0.4',
+ 'description': 'A pipen cli plugin to run command via Google Cloud Batch',
+ 'long_description': '# pipen-cli-gbatch\n\nA pipen CLI plugin to run commands via Google Cloud Batch.\n\nThe idea is to submit the command using xqute and use the gbatch scheduler to run it on Google Cloud Batch.\n\n## Installation\n\n```bash\npip install pipen-cli-gbatch\n```\n\n## Usage\n\n### Basic Command Execution\n\nTo run a command like:\n\n```bash\npython myscript.py --input input.txt --output output.txt\n```\n\nYou can run it with:\n\n```bash\npipen gbatch -- python myscript.py --input input.txt --output output.txt\n```\n\n### With Configuration File\n\nIn order to provide configurations like we do for a normal pipen pipeline, you can also provide a config file (the `[pipen-cli-gbatch]` section will be used):\n\n```bash\npipen gbatch @config.toml -- \\\n python myscript.py --input input.txt --output output.txt\n```\n\n### Detached Mode\n\nWe can also use the `--nowait` option to run the command in a detached mode:\n\n```bash\npipen gbatch --nowait -- \\\n python myscript.py --input input.txt --output output.txt\n```\n\nOr by default, it will wait for the command to complete:\n\n```bash\npipen gbatch -- \\\n python myscript.py --input input.txt --output output.txt\n```\n\nWhile waiting, the running logs will be pulled and shown in the terminal.\n\n### View Logs\n\nWhen running in detached mode, one can also pull the logs later by:\n\n```bash\npipen gbatch --view-logs -- \\\n python myscript.py --input input.txt --output output.txt\n\n# or just provide the workdir\npipen gbatch --view-logs --workdir gs://my-bucket/workdir\n```\n\n## Configuration\n\nBecause the daemon pipeline is running on Google Cloud Batch, a Google Storage Bucket path is required for the workdir. For example: `gs://my-bucket/workdir`\n\nA unique job ID will be generated per the name (`--name`) and workdir, so that if the same command is run again with the same name and workdir, it will not start a new job, but just attach to the existing job and pull the logs.\n\nIf `--name` is not provided in the command line, it will try to grab the name (`--name`) from the command line arguments after `--`, or else use "name" from the root section of the configuration file, with a "GbatchDaemon" suffix. 
If nothing can be found, a default name "PipenGbatchDaemon" will be used.\n\nThen a workdir `{workdir}/<daemon pipeline name>/` will be created to store the meta information.\n\nWith `--profile` provided, the scheduler options (`scheduler_opts`) defined in `~/.pipen.toml` and `./.pipen.toml` will be used as default.\n\n## All Options\n\n```bash\n> pipen gbatch --help\nUsage: pipen gbatch [-h] [--nowait | --view-logs {all,stdout,stderr}] [--workdir WORKDIR]\n [--error-strategy {retry,halt}] [--num-retries NUM_RETRIES] [--prescript PRESCRIPT]\n [--postscript POSTSCRIPT] [--jobname-prefix JOBNAME_PREFIX] [--recheck-interval RECHECK_INTERVAL]\n [--cwd CWD] [--project PROJECT] [--location LOCATION] [--mount MOUNT]\n [--service-account SERVICE_ACCOUNT] [--network NETWORK] [--subnetwork SUBNETWORK]\n [--no-external-ip-address] [--machine-type MACHINE_TYPE] [--provisioning-model {STANDARD,SPOT}]\n [--image-uri IMAGE_URI] [--entrypoint ENTRYPOINT] [--commands COMMANDS] [--runnables RUNNABLES]\n [--allocationPolicy ALLOCATIONPOLICY] [--taskGroups TASKGROUPS] [--labels LABELS] [--gcloud GCLOUD]\n [--name NAME] [--profile PROFILE] [--version]\n [--loglevel {DEBUG,INFO,WARNING,ERROR,CRITICAL,debug,info,warning,error,critical}]\n ...\n\nSimplify running commands via Google Cloud Batch.\n\nKey Options:\n The key options to run the command.\n\n --workdir WORKDIR The workdir (a Google Storage Bucket path is required) to store the meta information of the\n daemon pipeline.\n If not provided, the one from the command will be used.\n command The command passed after `--` to run, with all its arguments. Note that the command should be\n provided after `--`.\n\nScheduler Options:\n The options to configure the gbatch scheduler.\n\n --error-strategy {retry,halt}\n The strategy when there is error happened [default: halt]\n --num-retries NUM_RETRIES\n The number of retries when there is error happened. Only valid when --error-strategy is \'retry\'.\n [default: 0]\n --prescript PRESCRIPT\n The prescript to run before the main command.\n --postscript POSTSCRIPT\n The postscript to run after the main command.\n --jobname-prefix JOBNAME_PREFIX\n The prefix of the name prefix of the daemon job.\n If not provided, try to generate one from the command to run.\n If the command is also not provided, use \'pipen-gbatch-daemon\' as the prefix.\n --recheck-interval RECHECK_INTERVAL\n The interval to recheck the job status, each takes about 0.1 seconds. [default: 600]\n --cwd CWD The working directory to run the command. If not provided, the current directory is used. You\n can pass either a mounted path (inside the VM) or a Google Storage Bucket path (gs://...). If a\n Google Storage Bucket path is provided, the mounted path will be inferred from the mounted paths\n of the VM.\n --project PROJECT The Google Cloud project to run the job.\n --location LOCATION The location to run the job.\n --mount MOUNT The list of mounts to mount to the VM, each in the format of SOURCE:TARGET, where SOURCE must be\n either a Google Storage Bucket path (gs://...). 
[default: []]\n --service-account SERVICE_ACCOUNT\n The service account to run the job.\n --network NETWORK The network to run the job.\n --subnetwork SUBNETWORK\n The subnetwork to run the job.\n --no-external-ip-address\n Whether to disable external IP address for the VM.\n --machine-type MACHINE_TYPE\n The machine type of the VM.\n --provisioning-model {STANDARD,SPOT}\n The provisioning model of the VM.\n --image-uri IMAGE_URI\n The custom image URI of the VM.\n --entrypoint ENTRYPOINT\n The entry point of the container to run the command.\n --commands COMMANDS The list of commands to run in the container, each as a separate string. [default: []]\n --runnables RUNNABLES\n The JSON string of extra settings of runnables add to the job.json.\n Refer to https://cloud.google.com/batch/docs/reference/rest/v1/projects.locations.jobs#Runnable\n for details.\n You can have an extra key \'order\' for each runnable, where negative values mean to run before\n the main command,\n and positive values mean to run after the main command.\n --allocationPolicy ALLOCATIONPOLICY\n The JSON string of extra settings of allocationPolicy add to the job.json. Refer to\n https://cloud.google.com/batch/docs/reference/rest/v1/projects.locations.jobs#AllocationPolicy\n for details. [default: {}]\n --taskGroups TASKGROUPS\n The JSON string of extra settings of taskGroups add to the job.json. Refer to\n https://cloud.google.com/batch/docs/reference/rest/v1/projects.locations.jobs#TaskGroup for\n details. [default: []]\n --labels LABELS The JSON string of labels to add to the job. Refer to\n https://cloud.google.com/batch/docs/reference/rest/v1/projects.locations.jobs#Job.FIELDS.labels\n for details. [default: {}]\n --gcloud GCLOUD The path to the gcloud command. [default: gcloud]\n\nOptions:\n -h, --help show this help message and exit\n --nowait Run the command in a detached mode without waiting for its completion. [default: False]\n --view-logs {all,stdout,stderr}\n View the logs of a job.\n --name NAME The name of the daemon pipeline.\n If not provided, try to generate one from the command to run.\n If the command is also not provided, use \'PipenCliGbatchDaemon\' as the name.\n --profile PROFILE Use the `scheduler_opts` as the Scheduler Options of a given profile from pipen configuration\n files,\n including ~/.pipen.toml and ./pipen.toml.\n Note that if not provided, nothing will be loaded from the configuration files.\n --version Show the version of the pipen-cli-gbatch package. [default: False]\n --loglevel {DEBUG,INFO,WARNING,ERROR,CRITICAL,debug,info,warning,error,critical}\n Set the logging level for the daemon process. 
[default: INFO]\n\nExamples:\n \u200b\n # Run a command and wait for it to complete\n > pipen gbatch --workdir gs://my-bucket/workdir -- \\\n python myscript.py --input input.txt --output output.txt\n\n # Use named mounts\n > pipen gbatch --workdir gs://my-bucket/workdir --mount INFILE=gs://bucket/path/to/file \\\n --mount OUTDIR=gs://bucket/path/to/outdir -- \\\n cat $INFILE > $OUTDIR/output.txt\n \u200b\n # Run a command in a detached mode\n > pipen gbatch --nowait --project $PROJECT --location $LOCATION \\\n --workdir gs://my-bucket/workdir -- \\\n python myscript.py --input input.txt --output output.txt\n \u200b\n # If you have a profile defined in ~/.pipen.toml or ./.pipen.toml\n > pipen gbatch --profile myprofile -- \\\n python myscript.py --input input.txt --output output.txt\n \u200b\n # View the logs of a previously run command\n > pipen gbatch --view-logs all --name my-daemon-name \\\n --workdir gs://my-bucket/workdir\n```\n\n## API\n\nThe API can also be used to run commands programmatically:\n\n```python\nimport asyncio\nfrom pipen_cli_gbatch import CliGbatchDaemon\n\npipe = CliGbatchDaemon(config_for_daemon, command)\nasyncio.run(pipe.run())\n```\n\nNote that the daemon pipeline will always be running without caching, so that the command will always be executed when the pipeline is run.\n',
+ 'author': 'pwwang',
+ 'author_email': 'pwwang@pwwang.com',
+ 'maintainer': 'None',
+ 'maintainer_email': 'None',
+ 'url': 'https://github.com/pwwang/pipen-cli-gbatch',
+ 'packages': packages,
+ 'package_data': package_data,
+ 'install_requires': install_requires,
+ 'entry_points': entry_points,
+ 'python_requires': '>=3.9,<4.0',
+ }
+
+
+ setup(**setup_kwargs)