dpdispatcher: 0.6.10-py3-none-any.whl → 0.6.12-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dpdispatcher might be problematic.

dpdispatcher/_version.py CHANGED
@@ -1,7 +1,14 @@
 # file generated by setuptools-scm
 # don't change, don't track in version control
 
-__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+__all__ = [
+    "__version__",
+    "__version_tuple__",
+    "version",
+    "version_tuple",
+    "__commit_id__",
+    "commit_id",
+]
 
 TYPE_CHECKING = False
 if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
     from typing import Union
 
     VERSION_TUPLE = Tuple[Union[int, str], ...]
+    COMMIT_ID = Union[str, None]
 else:
     VERSION_TUPLE = object
+    COMMIT_ID = object
 
 version: str
 __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID
 
-__version__ = version = '0.6.10'
-__version_tuple__ = version_tuple = (0, 6, 10)
+__version__ = version = '0.6.12'
+__version_tuple__ = version_tuple = (0, 6, 12)
+
+__commit_id__ = commit_id = None
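The regenerated setuptools-scm file now exports a commit identifier alongside the version; in this release wheel it is baked in as None. A quick check of the new attributes (a sketch; only the names shown in this diff are assumed):

from dpdispatcher._version import __commit_id__, __version__

print(__version__)    # 0.6.12
print(__commit_id__)  # None in this release wheel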
dpdispatcher/contexts/dp_cloud_server_context.py CHANGED
@@ -161,7 +161,9 @@ class BohriumContext(BaseContext):
         # return oss_task_zip
         # api.upload(self.oss_task_dir, zip_task_file)
 
-    def download(self, submission):
+    def download(
+        self, submission, check_exists=False, mark_failure=True, back_error=False
+    ):
         jobs = submission.belonging_jobs
         job_hashs = {}
         job_infos = {}
dpdispatcher/contexts/lazy_local_context.py CHANGED
@@ -83,7 +83,7 @@ class LazyLocalContext(BaseContext):
 
     def upload(
         self,
-        jobs,
+        submission,
         # local_up_files,
         dereference=True,
     ):
@@ -91,7 +91,7 @@ class LazyLocalContext(BaseContext):
 
     def download(
         self,
-        jobs,
+        submission,
         # remote_down_files,
         check_exists=False,
         mark_failure=True,
dpdispatcher/contexts/openapi_context.py CHANGED
@@ -95,12 +95,13 @@ class OpenAPIContext(BaseContext):
             raise ValueError(
                 "remote_profile must contain 'project_id' or set environment variable 'BOHRIUM_PROJECT_ID'"
             )
-        self.client = Bohrium(
+        self.client = Bohrium(  # type: ignore[reportPossiblyUnboundVariable]
             access_key=access_key, project_id=project_id, app_key=app_key
         )
-        self.storage = Tiefblue()
-        self.job = Job(client=self.client)
+        self.storage = Tiefblue()  # type: ignore[reportPossiblyUnboundVariable]
+        self.job = Job(client=self.client)  # type: ignore[reportPossiblyUnboundVariable]
         self.jgid = None
+        os.makedirs(DP_CLOUD_SERVER_HOME_DIR, exist_ok=True)
 
     @classmethod
     def load_from_dict(cls, context_dict):
@@ -205,7 +206,9 @@ class OpenAPIContext(BaseContext):
         # return oss_task_zip
         # api.upload(self.oss_task_dir, zip_task_file)
 
-    def download(self, submission):
+    def download(
+        self, submission, check_exists=False, mark_failure=True, back_error=False
+    ):
         jobs = submission.belonging_jobs
         job_hashs = {}
         job_infos = {}
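With this change, the Bohrium-backed contexts accept the same download keyword arguments as the other contexts, so a Submission can drive any BaseContext subclass uniformly. A minimal sketch of the now-shared signature (the flag semantics are assumed from the other contexts, not spelled out in this diff):

from dpdispatcher.base_context import BaseContext

class MyContext(BaseContext):
    def download(self, submission, check_exists=False, mark_failure=True, back_error=False):
        # assumed semantics: check_exists tolerates missing remote files,
        # mark_failure tags failed downloads instead of raising,
        # back_error also fetches error output
        ...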
dpdispatcher/contexts/ssh_context.py CHANGED
@@ -45,6 +45,7 @@ class SSHSession:
         tar_compress=True,
         look_for_keys=True,
         execute_command=None,
+        proxy_command=None,
     ):
         self.hostname = hostname
         self.username = username
@@ -58,6 +59,7 @@ class SSHSession:
         self.tar_compress = tar_compress
         self.look_for_keys = look_for_keys
         self.execute_command = execute_command
+        self.proxy_command = proxy_command
         self._keyboard_interactive_auth = False
         self._setup_ssh()
 
@@ -142,7 +144,12 @@ class SSHSession:
         # transport = self.ssh.get_transport()
         sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         sock.settimeout(self.timeout)
-        sock.connect((self.hostname, self.port))
+
+        # Use ProxyCommand if configured (either directly or via jump host parameters)
+        if self.proxy_command is not None:
+            sock = paramiko.ProxyCommand(self.proxy_command)
+        else:
+            sock.connect((self.hostname, self.port))
 
         # Make a Paramiko Transport object using the socket
         ts = paramiko.Transport(sock)
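paramiko.ProxyCommand spawns the given command and speaks SSH over its stdin/stdout instead of a direct TCP socket, mirroring OpenSSH's ProxyCommand option. A self-contained sketch of what the new branch does, and of how the value would be supplied through the ssh_session arguments declared further down (hostnames are placeholders):

import paramiko

# tunnel to the target through a jump host instead of connecting directly
sock = paramiko.ProxyCommand("ssh -W cluster.example.com:22 user@jump.example.com")
ts = paramiko.Transport(sock)

# the same string, set via the new remote_profile key (placeholder values)
remote_profile = {
    "hostname": "cluster.example.com",
    "username": "user",
    "proxy_command": "ssh -W cluster.example.com:22 user@jump.example.com",
}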
@@ -163,7 +170,6 @@ class SSHSession:
         if os.path.exists(key_path):
             for pkey_class in (
                 paramiko.RSAKey,
-                paramiko.DSSKey,
                 paramiko.ECDSAKey,
                 paramiko.Ed25519Key,
             ):
@@ -181,7 +187,6 @@ class SSHSession:
         elif self.look_for_keys:
             for keytype, name in [
                 (paramiko.RSAKey, "rsa"),
-                (paramiko.DSSKey, "dsa"),
                 (paramiko.ECDSAKey, "ecdsa"),
                 (paramiko.Ed25519Key, "ed25519"),
             ]:
@@ -342,6 +347,9 @@ class SSHSession:
             "enable searching for discoverable private key files in ~/.ssh/"
         )
         doc_execute_command = "execute command after ssh connection is established."
+        doc_proxy_command = (
+            "ProxyCommand to use for SSH connection through intermediate servers."
+        )
         ssh_remote_profile_args = [
            Argument("hostname", str, optional=False, doc=doc_hostname),
            Argument("username", str, optional=False, doc=doc_username),
@@ -390,6 +398,13 @@ class SSHSession:
                 default=None,
                 doc=doc_execute_command,
             ),
+            Argument(
+                "proxy_command",
+                [str, type(None)],
+                optional=True,
+                default=None,
+                doc=doc_proxy_command,
+            ),
         ]
         ssh_remote_profile_format = Argument(
             "ssh_session", dict, ssh_remote_profile_args
@@ -398,23 +413,37 @@ class SSHSession:
 
     def put(self, from_f, to_f):
         if self.rsync_available:
+            # For rsync, we need to use %h:%p placeholders for target host/port
+            proxy_cmd_rsync = None
+            if self.proxy_command is not None:
+                proxy_cmd_rsync = self.proxy_command.replace(
+                    f"{self.hostname}:{self.port}", "%h:%p"
+                )
             return rsync(
                 from_f,
                 self.remote + ":" + to_f,
                 port=self.port,
                 key_filename=self.key_filename,
                 timeout=self.timeout,
+                proxy_command=proxy_cmd_rsync,
             )
         return self.sftp.put(from_f, to_f)
 
     def get(self, from_f, to_f):
         if self.rsync_available:
+            # For rsync, we need to use %h:%p placeholders for target host/port
+            proxy_cmd_rsync = None
+            if self.proxy_command is not None:
+                proxy_cmd_rsync = self.proxy_command.replace(
+                    f"{self.hostname}:{self.port}", "%h:%p"
+                )
             return rsync(
                 self.remote + ":" + from_f,
                 to_f,
                 port=self.port,
                 key_filename=self.key_filename,
                 timeout=self.timeout,
+                proxy_command=proxy_cmd_rsync,
             )
         return self.sftp.get(from_f, to_f)
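Because rsync launches its own ssh subprocess, the concrete host:port baked into the stored command is swapped for OpenSSH's %h:%p placeholders before being handed over. The substitution amounts to (placeholder values):

proxy_command = "ssh -W cluster.example.com:22 user@jump.example.com"
proxy_cmd_rsync = proxy_command.replace("cluster.example.com:22", "%h:%p")
# -> "ssh -W %h:%p user@jump.example.com"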
@@ -827,8 +856,8 @@ class SSHContext(BaseContext):
         # print(pid)
         return {"stdin": stdin, "stdout": stdout, "stderr": stderr}
 
-    def check_finish(self, cmd_pipes):
-        return cmd_pipes["stdout"].channel.exit_status_ready()
+    def check_finish(self, proc):
+        return proc["stdout"].channel.exit_status_ready()
 
     def get_return(self, cmd_pipes):
         if not self.check_finish(cmd_pipes):
@@ -890,11 +919,11 @@ class SSHContext(BaseContext):
         # local tar
         if os.path.isfile(os.path.join(self.local_root, of)):
             os.remove(os.path.join(self.local_root, of))
-        with tarfile.open(
+        with tarfile.open(  # type: ignore[reportCallIssue, reportArgumentType]
             os.path.join(self.local_root, of),
-            tarfile_mode,
-            dereference=dereference,
-            **kwargs,
+            mode=tarfile_mode,  # type: ignore[reportArgumentType]
+            dereference=dereference,  # type: ignore[reportArgumentType]
+            **kwargs,  # type: ignore[reportArgumentType]
         ) as tar:
             # avoid compressing duplicated files or directories
             for ii in set(files):
dpdispatcher/machine.py CHANGED
@@ -227,7 +227,7 @@ class Machine(metaclass=ABCMeta):
         return if_recover
 
     @abstractmethod
-    def check_finish_tag(self, **kwargs):
+    def check_finish_tag(self, job):
         raise NotImplementedError(
             "abstract method check_finish_tag should be implemented by derived class"
         )
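Narrowing the signature from **kwargs to an explicit job parameter documents the real contract and lets type checkers validate overrides. A hedged sketch of a conforming subclass (the tag-file convention is illustrative, not taken from this diff):

from dpdispatcher.machine import Machine

class MyMachine(Machine):
    def check_finish_tag(self, job):
        # treat the job as finished once its tag file exists in the task dir
        job_tag_finished = job.job_hash + "_job_tag_finished"
        return self.context.check_file_exists(job_tag_finished)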
dpdispatcher/machines/JH_UniScheduler.py CHANGED
@@ -84,9 +84,6 @@ class JH_UniScheduler(Machine):
         self.context.write_file(job_id_name, job_id)
         return job_id
 
-    def default_resources(self, resources):
-        pass
-
     @retry()
     def check_status(self, job):
         try:
dpdispatcher/machines/fugaku.py CHANGED
@@ -67,9 +67,6 @@ class Fugaku(Machine):
         self.context.write_file(job_id_name, job_id)
         return job_id
 
-    def default_resources(self, resources):
-        pass
-
     def check_status(self, job):
         job_id = job.job_id
         if job_id == "":
dpdispatcher/machines/lsf.py CHANGED
@@ -102,9 +102,6 @@ class LSF(Machine):
         return job_id
 
     # TODO: derive abstract methods
-    def default_resources(self, resources):
-        pass
-
     def sub_script_cmd(self, res):
         pass
 
dpdispatcher/machines/openapi.py CHANGED
@@ -64,11 +64,11 @@ class OpenAPI(Machine):
             raise ValueError(
                 "remote_profile must contain 'project_id' or set environment variable 'BOHRIUM_PROJECT_ID'"
             )
-        self.client = Bohrium(
+        self.client = Bohrium(  # type: ignore[reportPossiblyUnboundVariable]
             access_key=access_key, project_id=project_id, app_key=app_key
         )
-        self.storage = Tiefblue()
-        self.job = Job(client=self.client)
+        self.storage = Tiefblue()  # type: ignore[reportPossiblyUnboundVariable]
+        self.job = Job(client=self.client)  # type: ignore[reportPossiblyUnboundVariable]
         self.group_id = None
 
     def gen_script(self, job):
dpdispatcher/machines/pbs.py CHANGED
@@ -69,9 +69,6 @@ class PBS(Machine):
         self.context.write_file(job_id_name, job_id)
         return job_id
 
-    def default_resources(self, resources):
-        pass
-
     def check_status(self, job):
         job_id = job.job_id
         if job_id == "":
@@ -255,9 +252,6 @@ class SGE(PBS):
         self.context.write_file(job_id_name, job_id)
         return job_id
 
-    def default_resources(self, resources):
-        pass
-
     def check_status(self, job):
         ### https://softpanorama.org/HPC/Grid_engine/Queues/queue_states.shtml
         job_id = job.job_id
dpdispatcher/machines/shell.py CHANGED
@@ -60,9 +60,6 @@ class Shell(Machine):
         # self.context.write_file(job_id_name, job_id)
         # return job_id
 
-    def default_resources(self, resources):
-        pass
-
     def check_status(self, job):
         job_id = job.job_id
         # print('shell.check_status.job_id', job_id)
dpdispatcher/machines/slurm.py CHANGED
@@ -118,9 +118,6 @@ class Slurm(Machine):
         self.context.write_file(job_id_name, job_id)
         return job_id
 
-    def default_resources(self, resources):
-        pass
-
     @retry()
     def check_status(self, job):
         job_id = job.job_id
dpdispatcher/utils/dpcloudserver/client.py CHANGED
@@ -142,10 +142,10 @@ class Client:
         res = self.get("/data/get_sts_token", {})
         # print('debug>>>>>>>>>>>>>', res)
         dlog.debug(f"debug: _get_oss_bucket: res:{res}")
-        auth = oss2.StsAuth(
+        auth = oss2.StsAuth(  # type: ignore[reportPossiblyUnboundVariable]
             res["AccessKeyId"], res["AccessKeySecret"], res["SecurityToken"]
         )
-        return oss2.Bucket(auth, endpoint, bucket_name)
+        return oss2.Bucket(auth, endpoint, bucket_name)  # type: ignore[reportPossiblyUnboundVariable]
 
     def download(self, oss_file, save_file, endpoint, bucket_name):
         bucket = self._get_oss_bucket(endpoint, bucket_name)
@@ -184,7 +184,7 @@ class Client:
         )
         bucket = self._get_oss_bucket(endpoint, bucket_name)
         total_size = os.path.getsize(zip_task_file)
-        part_size = determine_part_size(total_size, preferred_size=1000 * 1024)
+        part_size = determine_part_size(total_size, preferred_size=1000 * 1024)  # type: ignore[reportPossiblyUnboundVariable]
         upload_id = bucket.init_multipart_upload(oss_task_zip).upload_id
         parts = []
         with open(zip_task_file, "rb") as fileobj:
@@ -196,9 +196,9 @@ class Client:
                 oss_task_zip,
                 upload_id,
                 part_number,
-                SizedFileAdapter(fileobj, num_to_upload),
+                SizedFileAdapter(fileobj, num_to_upload),  # type: ignore[reportPossiblyUnboundVariable]
             )
-            parts.append(PartInfo(part_number, result.etag))
+            parts.append(PartInfo(part_number, result.etag))  # type: ignore[reportPossiblyUnboundVariable]
             offset += num_to_upload
             part_number += 1
         # result = bucket.complete_multipart_upload(oss_task_zip, upload_id, parts)
dpdispatcher/utils/utils.py CHANGED
@@ -2,6 +2,7 @@ import base64
 import hashlib
 import hmac
 import os
+import shlex
 import struct
 import subprocess
 import time
@@ -89,6 +90,7 @@ def rsync(
     port: int = 22,
     key_filename: Optional[str] = None,
     timeout: Union[int, float] = 10,
+    proxy_command: Optional[str] = None,
 ):
     """Call rsync to transfer files.
 
@@ -104,6 +106,8 @@ def rsync(
         identity file name
     timeout : int, default=10
         timeout for ssh
+    proxy_command : str, optional
+        ProxyCommand to use for SSH connection
 
     Raises
     ------
@@ -124,20 +128,30 @@ def rsync(
     ]
     if key_filename is not None:
         ssh_cmd.extend(["-i", key_filename])
+
+    # Use proxy_command if provided
+    if proxy_command is not None:
+        ssh_cmd.extend(["-o", f"ProxyCommand={proxy_command}"])
+
+    # Properly escape the SSH command for rsync's -e option
+    ssh_cmd_str = " ".join(shlex.quote(part) for part in ssh_cmd)
+
     cmd = [
         "rsync",
-        # -a: archieve
-        # -z: compress
-        "-az",
+        # -r: recursive, -l: links, -p: perms, -t: times, -D: devices/specials
+        # -z: compress (exclude -o: owner, -g: group to avoid permission issues)
+        "-rlptDz",
         "-e",
-        " ".join(ssh_cmd),
+        ssh_cmd_str,
         "-q",
         from_file,
         to_file,
     ]
-    ret, out, err = run_cmd_with_all_output(cmd, shell=False)
+    # Convert to string for shell=True
+    cmd_str = " ".join(shlex.quote(arg) for arg in cmd)
+    ret, out, err = run_cmd_with_all_output(cmd_str, shell=True)
     if ret != 0:
-        raise RuntimeError(f"Failed to run {cmd}: {err}")
+        raise RuntimeError(f"Failed to run {cmd_str}: {err}")
 
 
 class RetrySignal(Exception):
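Quoting each element with shlex.quote keeps the embedded ProxyCommand intact when the whole invocation is later joined into one string for shell=True. For example (placeholder hosts):

import shlex

ssh_cmd = ["ssh", "-o", "ProxyCommand=ssh -W %h:%p user@jump.example.com"]
print(" ".join(shlex.quote(part) for part in ssh_cmd))
# ssh -o 'ProxyCommand=ssh -W %h:%p user@jump.example.com'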
dpdispatcher-0.6.10.dist-info/METADATA → dpdispatcher-0.6.12.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dpdispatcher
-Version: 0.6.10
+Version: 0.6.12
 Summary: Generate HPC scheduler systems jobs input scripts, submit these scripts to HPC systems, and poke until they finish
 Author: DeepModeling
 License: GNU LESSER GENERAL PUBLIC LICENSE
dpdispatcher-0.6.10.dist-info/RECORD → dpdispatcher-0.6.12.dist-info/RECORD RENAMED
@@ -1,49 +1,49 @@
 dpdispatcher/__init__.py,sha256=CLZP_N5CTp14ujWCykEHuJjoIfKR6CwrclXhjWUgNoE,517
 dpdispatcher/__main__.py,sha256=BFhG-mSBzVZUEezQJqXWZnt2WsnhAHT_zpT8Y6gpOz0,116
-dpdispatcher/_version.py,sha256=yBt570c5zFwvrFMCC4v6a1o2lJy1BEDsjaN6VpALgI0,513
+dpdispatcher/_version.py,sha256=a-0leJ4HihsyvhxqpbGKODhfNk5qw_xjChcx488fqoI,706
 dpdispatcher/arginfo.py,sha256=pNaxYIE6ahBidpR7OCKZdw8iGt003uTXGSlVzwiuvRg,188
 dpdispatcher/base_context.py,sha256=W4eWDWVzYeL6EuEkivmJp-_h_B2mV9PtRWc09l1_Qzc,5242
 dpdispatcher/dlog.py,sha256=QJKAwB6gV3Zb6zQUL9dZ_uIoTIEy9Z7ecmVQ-8WNmD8,1081
 dpdispatcher/dpdisp.py,sha256=jhuTmwPY7KBF4WukaQomEwZcfYoISaMbKwuxdDGSluc,4206
-dpdispatcher/machine.py,sha256=k53ycs_v7xrl4D93URc5ht0shoO9NPrVl0rYr4v5OiU,16696
+dpdispatcher/machine.py,sha256=bW4oEpmDKwMHrMESFMgQbUxG3N1xUh3Z4NRY1uxz73I,16691
 dpdispatcher/run.py,sha256=tFHbJAioXXpgHTE5bhRRAuc8w7cX1ET9SBbiAg3Rw-I,5382
 dpdispatcher/submission.py,sha256=zLzdKJkMXhvaicD2el33NxDHP_9LL29HBombxR1l-Sw,48086
 dpdispatcher/contexts/__init__.py,sha256=jlvcIppmUnS39yBlkZEDvIQFV-j_BR75ZTbZALF_RB0,336
-dpdispatcher/contexts/dp_cloud_server_context.py,sha256=PGRMef3q2hfK-o5dNIWWvzPca2NK1HrWEgungM4L9Go,12420
+dpdispatcher/contexts/dp_cloud_server_context.py,sha256=pdzQuG-j8sdNsindqwStWwu6OA0fZauCtj7FyWh_014,12491
 dpdispatcher/contexts/hdfs_context.py,sha256=mYQzXMZ4A9EjjWBAH3Ba6HOErUhMMwCsKxOjpd5R57Y,9105
-dpdispatcher/contexts/lazy_local_context.py,sha256=FAClbLD2F4LizUqFzMOg3t0Z6NLeTDLJy7NkRcDELFs,5070
+dpdispatcher/contexts/lazy_local_context.py,sha256=IyDBIKGRz0Ctur4VX1zA78kMi2lf6ZNRcZm_RnFkZSk,5082
 dpdispatcher/contexts/local_context.py,sha256=VbaSXGAc_EDMT0K5WV_flBF0bX87ntrwO_hq_Bkcb04,14590
-dpdispatcher/contexts/openapi_context.py,sha256=uEutwCFiPW0kd2k7dlSZrddDA7tHQSaV7d1-JCDrPQ4,11867
-dpdispatcher/contexts/ssh_context.py,sha256=qaj8h2TdY1i-YYdDstUBs9IJaLwzytwnQkdntMEZ7vg,37664
+dpdispatcher/contexts/openapi_context.py,sha256=0DKlgux-1zmUuS28AK1hy_n16n6uCTugI9xjjnKefnI,12140
+dpdispatcher/contexts/ssh_context.py,sha256=0Ah3fdgq0uZxWh4kLS6UbVmaQFgCZ2bsZTwTXVa_k-M,39048
 dpdispatcher/dpcloudserver/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dpdispatcher/dpcloudserver/client.py,sha256=k1niKjG6zFnMtHn_UuCjYoOcMju3o3PV-GdyVLr5-KM,165
 dpdispatcher/entrypoints/__init__.py,sha256=exKSFT3j2oCerGwtI8WbHQK-D0K-CyifocRji1xntT4,20
 dpdispatcher/entrypoints/gui.py,sha256=29lMXqbmSRbLj4rfBv7Jnw89NLU9syTB88IUP6IRJsU,830
 dpdispatcher/entrypoints/run.py,sha256=tRkHfeAktV6gF31yb2MVOSTlpNGZFw3N0jHBmM1YfIg,175
 dpdispatcher/entrypoints/submission.py,sha256=ikVwIZAQL0SsYO5xaMIdKXgO6qtc05w1vqmvtG7Nk5M,3401
-dpdispatcher/machines/JH_UniScheduler.py,sha256=ZeUZXqyGrN5Zec4gwpwH5r6FJXaJLRUJMQWDCP7X3Nk,5756
+dpdispatcher/machines/JH_UniScheduler.py,sha256=6A4lnZUIbsR501STkXyKEJCVaq-iJguzlfVuHq9ZRf4,5698
 dpdispatcher/machines/__init__.py,sha256=tOQuPUlW1Ab4qcC0oSAIyDjZA_WyE67h_EIxPCWGhys,336
 dpdispatcher/machines/distributed_shell.py,sha256=c0-lGeGz_M-PY2gPciT-uYZLQht5XTMaxJSNxkbMffc,7489
 dpdispatcher/machines/dp_cloud_server.py,sha256=SR69gsFb2BvOQCW1QnWfP3cQvu_qHLJNsycp5wzosJU,11706
-dpdispatcher/machines/fugaku.py,sha256=oY2hD2ldL2dztwtJ9WNisdsfPnaX-5yTRXewIT9r60I,4314
-dpdispatcher/machines/lsf.py,sha256=xGDq8OLAk83E9EjK_3-QtEOyahvBGspWbxT__7mnSTw,7896
-dpdispatcher/machines/openapi.py,sha256=8unjG9HTCBwbkZTW_t-QKOSaBHfqhzsMARTVVCDGGHw,9929
-dpdispatcher/machines/pbs.py,sha256=gUoj3OGQbZRBK4P-WXlhrxlQqTeUi9X8JGLOkAB__wE,12669
-dpdispatcher/machines/shell.py,sha256=EeYnRCowXdzO3Nh25Yh_t5xeM6frq4uChk4GVx7OjH8,4797
-dpdispatcher/machines/slurm.py,sha256=rN51Qh_u9ZVmjkIClu4Tfc-Qesr19DxPaaeh0FmRx14,15413
+dpdispatcher/machines/fugaku.py,sha256=BlaUfgHoGABROuZeT5byr12rXCVCqahXISL57SUObX0,4256
+dpdispatcher/machines/lsf.py,sha256=BH-lvXGkoCOAYJWAX7boE1TQtPfyMsRjmShIdnZ-MBE,7838
+dpdispatcher/machines/openapi.py,sha256=TXwXGYRB35049MU76-6SBinxSqvh11ey3iWYj8bqEi8,10070
+dpdispatcher/machines/pbs.py,sha256=9QsOKxyGc4fPRxBmFhQ1rlNzRlpmbSe6WvvkZj_0Q0o,12553
+dpdispatcher/machines/shell.py,sha256=hZwrIsT-GzXToCobrrmxY0GBoNy0BEH4oTK5MpQE1H4,4739
+dpdispatcher/machines/slurm.py,sha256=Rf8aV_HxpSLiQ5WC-8nUoGXjAaPsttrGu9ZV6ijYsXg,15355
 dpdispatcher/utils/__init__.py,sha256=fwvwkMf7DFNQkNBiIce8Y8gRA6FhICwKjkKiXu_BEJg,13
 dpdispatcher/utils/hdfs_cli.py,sha256=a1a9PJAzt3wsTcdaSw_oD1vcNw59pMooxpAHjYOaaGA,5209
 dpdispatcher/utils/job_status.py,sha256=Eszs4TPLfszCuf6zLaFonf25feXDUguF28spYOjJpQE,233
 dpdispatcher/utils/record.py,sha256=c8jdPmCuLzRmFo_jOjR0j9zFR1EWX3NSHVuPEIYCycg,2147
-dpdispatcher/utils/utils.py,sha256=Wo-8tGO05e2KkRyLXoIg3UlxzkuM-x1phRrTA1Hh7Ko,5328
+dpdispatcher/utils/utils.py,sha256=CSDzc3VhoF8HvEMaBiH-CSk6WnKFeORXzYcKHhBA4Dg,5940
 dpdispatcher/utils/dpcloudserver/__init__.py,sha256=FnX9HH-2dXADluNfucg98JPMfruMoBpN9ER9lZkVQvQ,49
-dpdispatcher/utils/dpcloudserver/client.py,sha256=fp1e14MTgsMgasZSWowq-NqfCoi21PnCrH1igCQVxFU,12179
+dpdispatcher/utils/dpcloudserver/client.py,sha256=BKKZyY5VblkIace2muFsvSX_7uEgM8E_IZBXaPtaT4I,12414
 dpdispatcher/utils/dpcloudserver/config.py,sha256=NteQzf1OeEkz2UbkXHHQ0B72cUu23zLVzpM9Yh4v1Cc,559
 dpdispatcher/utils/dpcloudserver/retcode.py,sha256=1qAF8gFZx55u2sO8KbtYSIIrjcO-IGufEUlwbkSfC1g,721
 dpdispatcher/utils/dpcloudserver/zip_file.py,sha256=f9WrlktwHW0YipaWg5Y0kxjMZlhD1cJYa6EUpvu4Cro,2611
-dpdispatcher-0.6.10.dist-info/licenses/LICENSE,sha256=46mU2C5kSwOnkqkw9XQAJlhBL2JAf1_uCD8lVcXyMRg,7652
-dpdispatcher-0.6.10.dist-info/METADATA,sha256=z5f6FV9VK8d-Eq8Lo7yql7bd3m02uleqLX2D9En74rw,12834
-dpdispatcher-0.6.10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-dpdispatcher-0.6.10.dist-info/entry_points.txt,sha256=NRHUV0IU_u7_XtcmmEDnVzAcUmurhiEAGwENckrajo4,233
-dpdispatcher-0.6.10.dist-info/top_level.txt,sha256=35jAQoXY-b-e9fJ1_mxhZUiaCoJNt1ZI7mpFRf07Qjs,13
-dpdispatcher-0.6.10.dist-info/RECORD,,
+dpdispatcher-0.6.12.dist-info/licenses/LICENSE,sha256=46mU2C5kSwOnkqkw9XQAJlhBL2JAf1_uCD8lVcXyMRg,7652
+dpdispatcher-0.6.12.dist-info/METADATA,sha256=1tBNA2PlJdHyxL8tbhO6G2XqpsaZ4tlBohcdUpEGfRc,12834
+dpdispatcher-0.6.12.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dpdispatcher-0.6.12.dist-info/entry_points.txt,sha256=NRHUV0IU_u7_XtcmmEDnVzAcUmurhiEAGwENckrajo4,233
+dpdispatcher-0.6.12.dist-info/top_level.txt,sha256=35jAQoXY-b-e9fJ1_mxhZUiaCoJNt1ZI7mpFRf07Qjs,13
+dpdispatcher-0.6.12.dist-info/RECORD,,