ciocore 6.3.1rc3__py2.py3-none-any.whl → 6.4.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ciocore might be problematic. Click here for more details.

ciocore/VERSION CHANGED
@@ -1 +1 @@
1
- 6.3.1-rc.3
1
+ 6.4.0
ciocore/__about__.py CHANGED
@@ -2,26 +2,9 @@ import os
2
2
 
3
3
  __all__ = ["__version__"]
4
4
 
5
- # Get the version from the VERSION file
6
- # The VERSION file may be in the current directory or (in dev) one directory up
7
-
8
5
  try:
9
- with open(
10
- os.path.join(os.path.abspath(os.path.dirname(__file__)), "VERSION"),
11
- "r",
12
- encoding="utf-8",
13
- ) as version_file:
6
+ with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'VERSION')) as version_file:
14
7
  __version__ = version_file.read().strip()
15
-
8
+
16
9
  except IOError:
17
- try:
18
- with open(
19
- os.path.join(
20
- os.path.abspath(os.path.dirname(os.path.dirname(__file__))), "VERSION"
21
- ),
22
- "r",
23
- encoding="utf-8",
24
- ) as version_file:
25
- __version__ = version_file.read().strip()
26
- except IOError:
27
- __version__ = "dev"
10
+ __version__ = 'dev'
@@ -0,0 +1,3 @@
1
+ """
2
+ Commandline tools.
3
+ """
@@ -0,0 +1,206 @@
1
+ #!/usr/bin/env python
2
+ # Use absolute imports to avoid a "conductor" name clash (this module name vs conductor package).
3
+ from __future__ import absolute_import
4
+
5
+ import argparse
6
+ import os
7
+ import sys
8
+
9
+ sys.path.append(os.path.dirname(os.path.dirname(__file__)))
10
+
11
+ from ciocore import downloader, uploader, loggeria, config
12
+
13
+ def build_parser():
14
+ cfg = config.config().config
15
+
16
+ # Create a parent parser. Arguments that are common across all subparsers can be added to this parser
17
+ parent_parser = argparse.ArgumentParser(add_help=False)
18
+
19
+ # create the main parser. Not sure why this parser is required, but got parsing tracebacks when excluding it (it gets confused about the arguments provided)
20
+ parser = argparse.ArgumentParser(description="description")
21
+ subparsers = parser.add_subparsers(title="actions")
22
+
23
+
24
+ #############################
25
+ # UPLOADER PARSER
26
+ #############################
27
+ uploader_parser_desciption = "parse uploader arguments"
28
+ uploader_parser_help = ("Starts the Uploader in a continuous running mode, polling Conductor for "
29
+ "paths to upload unless a list of paths are provided."
30
+ )
31
+
32
+ uploader_parser = subparsers.add_parser("uploader", parents=[parent_parser],
33
+ help=uploader_parser_help,
34
+ description=uploader_parser_desciption,
35
+ formatter_class=argparse.RawTextHelpFormatter)
36
+
37
+ uploader_parser.add_argument("--database_filepath",
38
+ help=("The filepath to the local md5 caching database. If no filepath "
39
+ "is specified, the database will be created in a temp directory"))
40
+
41
+ uploader_parser.add_argument("--location",
42
+ help=('An optional string to indicate which location this uploader '
43
+ 'executable should register as. This option is only relevant '
44
+ 'for conductor accounts which submits jobs from different locations '
45
+ '(e.g. differing geographic locations or office locations that have differing file systems).'
46
+ ' Typically each location would have its own conductor uploader process running. This location '
47
+ 'string allows each uploader to target specific upload jobs (files to upload) that are appropriate '
48
+ 'for it. This is potentially useful as each location may have differing file systems '
49
+ 'available to it (e.g. uploader1 has /filesystem1 available to it, but uploader2 only '
50
+ 'has /filesystem2 available to it). In this case uploader1 should only upload files '
51
+ 'that exist on /filesystem1 and uploader2 should only upload files that exist on /filesystem2. '
52
+ 'This is achieved by including a location argument (such as "location1" or "location2") '
53
+ 'when submitting jobs, as well as when launching this uploader command.'))
54
+
55
+ uploader_parser.add_argument("--md5_caching",
56
+ help=("Use cached md5s. This can dramatically improve the uploading "
57
+ "times, as md5 checking can be very time consuming. Caching md5s "
58
+ "allows subsequent uploads (of the same files) to skip the "
59
+ "md5 generation process (if the files appear to not have been "
60
+ "modified since the last time they were submitted). The cache is "
61
+ "stored locally and uses a file's modification time and file size "
62
+ "to intelligently guess whether the file has changed. Set this "
63
+ "flag to False if there is concern that files may not be getting "
64
+ "re-uploaded properly"),
65
+ choices=[False, True],
66
+ type=cast_to_bool,
67
+ default=None)
68
+
69
+ uploader_parser.add_argument("--log_level",
70
+ choices=loggeria.LEVELS,
71
+ help="The logging level to display")
72
+
73
+ uploader_parser.add_argument("--log_dir",
74
+ help=("When provided, will write a log file to "
75
+ "the provided directory. This will be a "
76
+ "rotating log, creating a new log file "
77
+ "everyday, while storing the last 7 days "
78
+ "of logs"))
79
+
80
+ uploader_parser.add_argument("--thread_count",
81
+ type=int,
82
+ default=cfg["thread_count"],
83
+ help=('The number of threads that should download simultaneously'))
84
+
85
+ uploader_parser.add_argument("--paths",
86
+ type=str,
87
+ action="append",
88
+ nargs="+",
89
+ help=('A list of explicit paths to upload. Paths with spaces and/or special characters should be encapsulated in quotes'))
90
+
91
+ uploader_parser.set_defaults(func=run_uploader)
92
+
93
+ #############################
94
+ # DOWNLOADER PARSER
95
+ #############################
96
+
97
+ downloader_parser_desciption = "parse downloader arguments"
98
+ downloader_parser_help = ""
99
+
100
+ downloader_parser = subparsers.add_parser("downloader", parents=[parent_parser],
101
+ help=downloader_parser_help,
102
+ description=downloader_parser_desciption,
103
+ formatter_class=argparse.RawTextHelpFormatter)
104
+
105
+ downloader_parser.add_argument("--job_id",
106
+ help=("The job id(s) to download. When specified "
107
+ "will only download those jobs and terminate "
108
+ "afterwards"),
109
+ action='append')
110
+
111
+ downloader_parser.add_argument("--task_id",
112
+ help="Manually download output for specific tasks - use a comma-separated list of tasks if you wish")
113
+
114
+ downloader_parser.add_argument("--output",
115
+ help="Override for the output directory")
116
+
117
+ downloader_parser.add_argument("--location",
118
+ help=('An optional string to indicate which location this downloader '
119
+ 'executable should register as. This option is only relevant for '
120
+ 'conductor accounts which submits jobs from different locations '
121
+ '(e.g. differing geographic locations or office locations that '
122
+ 'have differing file systems). Typically each location would '
123
+ 'have its own conductor downloader process running. This location '
124
+ 'argument allows each downloader to target specific jobs (to '
125
+ 'download upon job-completion) that match its appropriate location. '
126
+ 'Essentially this allows the location of which a job was submitted '
127
+ 'from to also be the destination in which to deliver completed '
128
+ 'renders to (which would typically be the desired behavior).'))
129
+
130
+ downloader_parser.add_argument("--project",
131
+ help=('An optional string to indicate which project that this downloader executable should register as.'))
132
+
133
+ downloader_parser.add_argument("--log_level",
134
+ choices=loggeria.LEVELS,
135
+ default=cfg["log_level"],
136
+ help="The logging level to display")
137
+
138
+ downloader_parser.add_argument("--log_dir",
139
+ help=("When provided, will write a log file to "
140
+ "the provided directory. This will be a "
141
+ "rotating log, creating a new log file "
142
+ "everyday, while storing the last 7 days "
143
+ "of logs"))
144
+
145
+ downloader_parser.add_argument("--thread_count",
146
+ type=int,
147
+ default=cfg["thread_count"],
148
+ help=('The number of threads that should download simultaneously'))
149
+
150
+ downloader_parser.add_argument("--alt",
151
+ help=('Run an alternative version of the downloader'),
152
+ action='store_true')
153
+
154
+ downloader_parser.set_defaults(func=run_downloader)
155
+
156
+ return parser
157
+
158
+
159
+ def cast_to_bool(string):
160
+ '''
161
+ Ensure that the argument provided is either "True" or "False (or "true" or
162
+ "false") and convert that argument to an actual bool value (True or False).
163
+ '''
164
+ string_lower = string.lower()
165
+ if string_lower == "true":
166
+ return True
167
+ elif string_lower == "false":
168
+ return False
169
+ raise argparse.ArgumentTypeError('Argument must be True or False')
170
+
171
+ def run_uploader(args):
172
+ uploader.run_uploader(args)
173
+
174
+
175
+ def run_downloader(args):
176
+ '''
177
+ Convert the argparse Namespace object to a dictionary and run the downloader
178
+ with the given args.
179
+ '''
180
+ # Convert Namespace args object to args dict
181
+ args_dict = vars(args)
182
+
183
+ if args_dict.get("task_id") and not args_dict.get("job_id"):
184
+ raise argparse.ArgumentTypeError('Must supply a job_id with task_id.')
185
+
186
+ # New downloader broke in python 3. It was used only for linux and in
187
+ # daemon mode, so for now we'll use the old downloader for everything.
188
+
189
+ return downloader.run_downloader(args_dict)
190
+
191
+
192
+ def main():
193
+ parser = build_parser()
194
+ args = parser.parse_args()
195
+ # Handle calling the script without an argument, fixes argparse issue
196
+ # https://bugs.python.org/issue16308
197
+ try:
198
+ func = args.func
199
+ except AttributeError:
200
+ parser.error("too few arguments")
201
+ func(args)
202
+
203
+
204
+ if __name__ == '__main__':
205
+ main()
206
+
ciocore/downloader.py CHANGED
@@ -18,10 +18,10 @@ import time
18
18
  import threading
19
19
  import hashlib
20
20
 
21
+ import ciocore
21
22
  from ciocore import config
22
23
  from ciocore import api_client, common, loggeria
23
24
  from ciocore.common import CONDUCTOR_LOGGER_NAME
24
- from cioseq.sequence import Sequence
25
25
 
26
26
  try:
27
27
  import Queue as queue
@@ -291,12 +291,12 @@ class Downloader(object):
291
291
  downloader.print_uptime()
292
292
 
293
293
  @classmethod
294
- def download_jobs(cls, job_ids, thread_count=None, output_dir=None):
294
+ def download_jobs(cls, job_ids, task_id=None, thread_count=None, output_dir=None):
295
295
  """
296
296
  Run the downloader for explicit jobs, and terminate afterwards.
297
297
  """
298
298
  downloader = cls(thread_count=thread_count, output_dir=output_dir)
299
- downloader.start(job_ids)
299
+ thread_states = downloader.start(job_ids, task_id=task_id)
300
300
  while not common.SIGINT_EXIT and (
301
301
  not downloader.pending_queue.empty() or not downloader.downloading_queue.empty()
302
302
  ):
@@ -306,7 +306,7 @@ class Downloader(object):
306
306
  downloader._print_download_history()
307
307
  downloader.print_uptime()
308
308
 
309
- def start(self, job_ids=None, summary_interval=10):
309
+ def start(self, job_ids=None, task_id=None, summary_interval=10):
310
310
  # Create new queues
311
311
  self.start_time = time.time()
312
312
  self.pending_queue = queue.Queue()
@@ -316,7 +316,7 @@ class Downloader(object):
316
316
  # If a job id has been specified then only load the queue up with that work
317
317
  if job_ids:
318
318
  self.history_queue_max = None
319
- self.get_jobs_downloads(job_ids)
319
+ self.get_jobs_downloads(job_ids, task_id)
320
320
 
321
321
  # otherwise create a queue thread that polls the app for work
322
322
  else:
@@ -461,12 +461,10 @@ class Downloader(object):
461
461
  # Wait for the queue to be consumed before querying for more
462
462
  self.nap()
463
463
 
464
-
465
464
  def nap(self):
466
465
  while not common.SIGINT_EXIT:
467
466
 
468
467
  time.sleep(self.naptime)
469
- # Pretty sure this return should be unindented!
470
468
  return
471
469
 
472
470
  @common.dec_timer_exit(log_level=logging.DEBUG)
@@ -479,51 +477,24 @@ class Downloader(object):
479
477
  except Exception as e:
480
478
  logger.exception("Could not get next download")
481
479
 
482
- def get_jobs_downloads(self, job_ids):
480
+ def get_jobs_downloads(self, job_ids, task_id):
483
481
  """
484
- Download jobs, each with an optional task specification.
485
-
486
- Job ids are padded to 5 digits, and task ids are padded to 3 digits.
482
+ Get each job and optional comma-separated task list.
487
483
 
488
- job_ids: (tuple) may take any of the following forms:
489
- 1. A single job id, e.g. (01234,)
490
- 2. It doesn't have to be padded, e.g. (1234,)
491
- 3. A jobs with a task specification, e.g. (01234:1-3,5,7-9)
492
- 4. Several of jobs:tasks, e.g. (01234, 56789:1-3,5,7-9)
493
-
494
- """
495
- jobs = self._flatten(job_ids)
496
- for job in jobs:
497
- endpoint = self.endpoint_downloads_job % job["job_id"]
498
-
499
- for tid in job["tasks"] or [None]:
500
- downloads = _get_job_download(endpoint, self.api_client, job["job_id"], tid)
484
+ There will only be a task list if there is just one job, due to earlier arg validation.
485
+
486
+ If there is no task list, _get_job_download is called with tid=None (i.e. the whole job)
487
+ """
488
+ task_ids = [t for t in task_id.split(",") if t] if task_id else [None]
489
+
490
+ for job_id in job_ids:
491
+ endpoint = self.endpoint_downloads_job % job_id
492
+ for tid in task_ids:
493
+ downloads = _get_job_download(endpoint, self.api_client, job_id, tid)
501
494
  if downloads:
502
495
  for task_download in downloads.get("downloads", []):
503
496
  print("putting in queue: %s" % task_download)
504
497
  self.pending_queue.put(task_download, block=True)
505
-
506
- @staticmethod
507
- def _flatten(job_ids):
508
- """Create a list of job objects with keys: job_id and tasks.
509
-
510
- see: get_jobs_downloads() function see tests/test_downloader.py for examples.
511
- """
512
- result = []
513
- for job_id in job_ids:
514
- if ":" in job_id:
515
- job_id, range_spec = job_id.split(":")
516
- try:
517
- seq = Sequence.create(range_spec)
518
- tasks = seq.expand("###")
519
- except (ValueError, TypeError):
520
- tasks = None
521
- else:
522
- tasks = None
523
- result.append({"job_id": job_id.zfill(5), "tasks": tasks})
524
- return result
525
-
526
-
527
498
 
528
499
  @common.dec_catch_exception(raise_=True)
529
500
  def download_target(self, pending_queue, downloading_queue, task_download_state):
@@ -1249,6 +1220,53 @@ def _in_queue(the_queue, item_dict, key):
1249
1220
  if item[key] == item_dict[key]:
1250
1221
  return True
1251
1222
 
1223
+
1224
+ def run_downloader(args):
1225
+ """
1226
+ Start the downloader. If a job id(s) were given, exit the downloader upon completion.
1227
+ Otherwise, run the downloader indefinitely (daemon mode), polling the Conductor cloud app for
1228
+ files that need to be downloaded.
1229
+ """
1230
+
1231
+ # Set up logging
1232
+ log_level_name = args.get("log_level")
1233
+ log_level = loggeria.LEVEL_MAP.get(log_level_name)
1234
+ log_dirpath = args.get("log_dir")
1235
+ set_logging(log_level, log_dirpath)
1236
+
1237
+ api_client.ApiClient.register_client(client_name = Downloader.CLIENT_NAME, client_version=ciocore.__version__)
1238
+
1239
+ logger.debug("Downloader args: %s", args)
1240
+
1241
+ job_ids = args.get("job_id")
1242
+ thread_count = args.get("thread_count")
1243
+
1244
+ if job_ids:
1245
+ Downloader.download_jobs(
1246
+ job_ids,
1247
+ task_id=args.get("task_id"),
1248
+ thread_count=thread_count,
1249
+ output_dir=args.get("output"),
1250
+ )
1251
+
1252
+ else:
1253
+ Downloader.start_daemon(
1254
+ thread_count=thread_count, location=args.get("location"), output_dir=args.get("output")
1255
+ )
1256
+
1257
+
1258
+ def set_logging(level=None, log_dirpath=None):
1259
+ log_filepath = None
1260
+ if log_dirpath:
1261
+ log_filepath = os.path.join(log_dirpath, "conductor_dl_log")
1262
+ loggeria.setup_conductor_logging(
1263
+ logger_level=level,
1264
+ console_formatter=LOG_FORMATTER,
1265
+ file_formatter=LOG_FORMATTER,
1266
+ log_filepath=log_filepath,
1267
+ )
1268
+
1269
+
1252
1270
  def report_error(self, download_id, error_message):
1253
1271
  try:
1254
1272
  logger.error("failing upload due to: \n%s" % error_message)
@@ -728,12 +728,6 @@ class Uploader(object):
728
728
 
729
729
  return True
730
730
 
731
- def assets_only(self, *paths):
732
- processed_filepaths = file_utils.process_upload_filepaths(paths)
733
- file_map = {path: None for path in processed_filepaths}
734
- self.handle_upload_response(project=None, upload_files=file_map)
735
-
736
-
737
731
  def handle_upload_response(self, project, upload_files, upload_id=None):
738
732
  """
739
733
  This is a really confusing method and should probably be split into to clear logic
@@ -916,6 +910,7 @@ def set_logging(level=None, log_dirpath=None):
916
910
  log_filepath=log_filepath,
917
911
  )
918
912
 
913
+
919
914
  def run_uploader(args):
920
915
  """
921
916
  Start the uploader process. This process will run indefinitely, polling
@@ -945,6 +940,7 @@ def run_uploader(args):
945
940
  else:
946
941
  uploader.main()
947
942
 
943
+
948
944
  def get_file_info(filepath):
949
945
  """
950
946
  For the given filepath return the following information in a dictionary:
@@ -0,0 +1,19 @@
1
+ #!python
2
+ # -*- coding: utf-8 -*-
3
+ # WHY CHANGE
4
+
5
+ # DO NOT SORT IMPORTS
6
+ import os
7
+ import re
8
+ import sys
9
+
10
+ CIO_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
11
+ sys.path.insert(0, CIO_DIR)
12
+
13
+ print("Inserted {} into sys.path".format(CIO_DIR))
14
+
15
+ from ciocore.cli.conductor import main
16
+
17
+ if __name__ == '__main__':
18
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
19
+ sys.exit(main())
@@ -0,0 +1,13 @@
1
+ @echo off
2
+
3
+ echo Looking for Python...
4
+
5
+ WHERE python.exe
6
+ if ERRORLEVEL 1 (
7
+ echo WARNING: python.exe not found.
8
+ echo Please find or install Python and add it to your Path variable. Then run this command again.
9
+ echo If you don't want to add python to your Path, then you can use the full path to a Python installation.
10
+ echo Example: "C:\some\tools\python\python.exe" "%~dp0conductor %*
11
+ exit /b
12
+ )
13
+ python.exe "%~dp0\conductor" %*
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ciocore
3
- Version: 6.3.1rc3
3
+ Version: 6.4.0
4
4
  Summary: Core functionality for Conductor's client tools
5
5
  Home-page: https://github.com/ConductorTechnologies/ciocore
6
6
  Author: conductor
@@ -9,23 +9,21 @@ Classifier: Operating System :: OS Independent
9
9
  Classifier: Programming Language :: Python
10
10
  Classifier: Topic :: Multimedia :: Graphics :: 3D Rendering
11
11
  Description-Content-Type: text/markdown
12
- Requires-Dist: certifi (==2019.11.28)
13
- Requires-Dist: pyjwt (==1.7.1)
14
- Requires-Dist: requests (<=2.25.1,>=2.10.0)
15
- Requires-Dist: future (>=0.18.2)
16
- Requires-Dist: urllib3 (==1.25.11)
17
- Requires-Dist: cioseq (<1.0.0,>=0.1.14)
18
- Requires-Dist: Click
19
- Requires-Dist: markdown
12
+ Requires-Dist: certifi ==2019.11.28
13
+ Requires-Dist: pyjwt ==1.7.1
14
+ Requires-Dist: requests <=2.25.1,>=2.10.0
15
+ Requires-Dist: future >=0.18.2
16
+ Requires-Dist: urllib3 ==1.25.11
20
17
 
21
- ## Introduction
18
+ # Conductor Core
22
19
 
23
- Conductor Core is a Python package that provides an API and a command-line interface for interacting with Conductor.
20
+ Use the command-line uploader and downloader or develop your own tools using the Python API.
24
21
 
25
22
  ## Install
26
23
 
24
+ **To install the latest version.**
27
25
  ```bash
28
- pip install ciocore
26
+ pip install --upgrade ciocore
29
27
  ```
30
28
 
31
29
  Run the conductor command to confirm the package was installed.
@@ -33,13 +31,6 @@ Run the conductor command to confirm the package was installed.
33
31
  conductor --help
34
32
  ```
35
33
 
36
- Get help for a specific sub command
37
- ```bash
38
- conductor downloader --help
39
- conductor uploader --help
40
- conductor software --help
41
- ```
42
-
43
34
  Example API usage
44
35
  ```python
45
36
  from ciocore import api_client
@@ -52,20 +43,9 @@ See [CONTRIBUTING](CONTRIBUTING.md)
52
43
 
53
44
  ## Changelog
54
45
 
55
- ## Version:6.3.1-rc.3 -- 24 Jul 2023
56
-
57
- * This release has an improved commandline experience. The new subcommands, `upload` and
58
- `download` have a more idiomatic feel, and better control over what gets uploaded or downloaded.
59
- The old commands `uploader` and `downloader` are still available, but are deprecated.
60
- * There's also now a `packages` subcommand that allows you to query the package database.
61
- * The command installs with console_scripts which improves stability and avoids situations wehere
62
- the Conductor command cannot be3 found. (#72)
63
- * Adds a `--version` flag
64
-
65
- ## Version:6.3.1-rc.2 -- 23 Jul 2023
46
+ ## Version:6.4.0 -- 12 Sep 2023
66
47
 
67
- * Fixes missing import.
68
- * Improves uploader error handling and queueing fixes.
48
+ * Uploader now supports callbacks and is interruptable
69
49
 
70
50
  ## Version:6.3.0 -- 05 Jul 2023
71
51
 
@@ -1,40 +1,40 @@
1
- ciocore/VERSION,sha256=YPDeKbDykk0XdwIXsKbLtvYyzEWPnsY8mq21KfcUpok,10
2
- ciocore/__about__.py,sha256=w23ToSZc0RpHPxOe-gPr1qA_yFqOFY0WvrujXwGisTk,717
1
+ ciocore/VERSION,sha256=Yl4u2S_TJL-hz2vF8sZWuwgSXkwNAQL0sIXjVAPmLZU,5
2
+ ciocore/__about__.py,sha256=nTb4Xx0r9QtGROSFKgwDZ-Mr2LKjm2wVt1OkMQAkRAQ,241
3
3
  ciocore/__init__.py,sha256=zB_e7gBW2QloqbrGzcvccfpZhOhd6o9KACnScrwylm8,25
4
4
  ciocore/api_client.py,sha256=O9uvtnqgmE2IRkqlP9DoPn6CX_HAIuSLo9je5Om-800,17913
5
- ciocore/cli.py,sha256=ivAsgWKrws5GFJh0qFlme_P5Dqq5k6KiWB32Qg3KsVc,11313
6
5
  ciocore/client_db.py,sha256=tTz3bl2xeDPPcYSDS3g3QgV_xYihJMx0Kj6OeN2klK0,12978
7
6
  ciocore/common.py,sha256=FnggAL-IGW8VQB6kLsrKLnxWFgLpwGBhOtiZSHz130M,14725
8
7
  ciocore/compat.py,sha256=5uEXPSog_jxsDMaHBswAKEtfyXT25VgU6WNGIhz9PHU,256
9
8
  ciocore/conductor_submit.py,sha256=ONE0LsA5hGavTJIOXXYx8qzl8_vBPADwhd6Ytq_0E0c,9382
10
9
  ciocore/config.py,sha256=fcxnwaYmNAmvMXQeb5V-86Pwwy3a3NTPCA0CSLqog3s,5913
11
10
  ciocore/data.py,sha256=G6tAfSkMwy3DS_fesqkDaoMgrfd_JrvV8Fs_JFpcCCc,8950
12
- ciocore/downloader.py,sha256=Q5DCN3I6bvGsVoJir4XisOjlZVTQV41fgJg0H4Obagw,50636
11
+ ciocore/downloader.py,sha256=oRmRePdIx_GdQwM39ipl9_e2c2ZkcGJRanuHOZmqpTM,51147
13
12
  ciocore/exceptions.py,sha256=4Oq-WX-qiN6kPUdBCHvvd6mtSQ0nCkDbJxWt2CNtpv8,1504
14
13
  ciocore/file_utils.py,sha256=bAlL31B4YkRgX-yT8kF8UXBFktQlsE1PvxbKqTeAeOU,17174
15
14
  ciocore/hardware_set.py,sha256=txcSLrVSNewRGxKLye-wuM8szGMVloU29ktL8WHdUtM,5401
16
15
  ciocore/loggeria.py,sha256=dKKJC8ZtRZdghqD5R5XrA6eDoy8gKacfeTA-zNzXvDE,13482
17
16
  ciocore/package_environment.py,sha256=oEbNKXRtPSPzKR-yCoKtvgzu4OCmr-zaqAcNoLAN9Uk,7238
18
- ciocore/package_query.py,sha256=2m5EBXfu1lmqupZrFF8f8mfkX_PgijpdMxCtFI5e5s0,5574
19
17
  ciocore/package_tree.py,sha256=kH03HVfjomj7nsaxJJtr-1KSQ_9ZSQY5msG_l9btvg8,16277
20
18
  ciocore/post_install.py,sha256=zu5Ctz2ANbKD-f5G2ODLIhKkWENBi4F3UKKu50OEWrg,1000
21
19
  ciocore/validator.py,sha256=f_K7gxz122W_i5AxVx6dKhckOygl8TnmQiVj7tyX5zw,2344
22
20
  ciocore/worker.py,sha256=_WQq2_ZhXuc5NTfYQtcWUGULU6IUZykeVW5e_4_ffZc,21728
23
21
  ciocore/auth/__init__.py,sha256=cdS-xZzMq41yXM5cz8sUlcYgo8CJYh8HcCCWmhbDgf0,606
24
22
  ciocore/auth/server.py,sha256=8btX9-EokUl6q55V8muDmEV2tvvbTBD0BHeWFbwkzUc,3892
23
+ ciocore/cli/__init__.py,sha256=RmZKWJaMpzNyMdyYc2W3VXglaJiC8vyR2cgUlA-9Qmw,26
24
+ ciocore/cli/conductor.py,sha256=snmlICkMgP94ZPKl6J7g299deB75QwGDWI0oCnZPfSI,10861
25
25
  ciocore/uploader/__init__.py,sha256=hxRFJf5Lo86rtRObFXSjjot8nybQd-SebSfYCbgZwow,24
26
- ciocore/uploader/_uploader.py,sha256=NDtoSgfUkmHCY6h0yypiPrwGH6nJwTORp_LptHCiv28,37675
26
+ ciocore/uploader/_uploader.py,sha256=31lpXVp2VG6rzVF7nw24Uafs6Z-p9CUF_bghku386R4,37430
27
27
  ciocore/uploader/upload_stats/__init__.py,sha256=kc1YCXZOy4EBpSSRIJKe89DWgiBwTzOPMbZ7530EXWw,5354
28
28
  ciocore/uploader/upload_stats/stats_formats.py,sha256=dtjsxdbT4w6BXFay19dzijf-xLwf6TktNk75Wq3rPAw,2452
29
+ ciocore-6.4.0.data/scripts/conductor,sha256=Nk3QsLQqbUUrtaKDp4b5mr9__4tz-xnssENpQe5vuIo,409
30
+ ciocore-6.4.0.data/scripts/conductor.bat,sha256=T1_9ByheubBczgQZn8_LwfvMtWgE7Bt64EsEScnSXMs,447
29
31
  tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
30
32
  tests/instance_type_fixtures.py,sha256=gR6ordfghp34I94oM2eDg6jfIe5HAwE644GKIwspuW8,3469
31
33
  tests/package_fixtures.py,sha256=CsJnhB7oYzIxJH7b1tCOPyvnnVSCqEbSPhtCnsHL-nA,5070
32
34
  tests/test_api_client.py,sha256=dWOmAIKmX0gLaDGsT9VfAq9Hcs9HIQf2P5IMM_Bt5dE,1284
33
- tests/test_cli.py,sha256=Td8kUgIz9GhCG0FxbL66Kohf4tpJRhQnOFE_tZSe-8k,5970
34
35
  tests/test_common.py,sha256=lJpzRdL-7u4McXFbLuwPQQoUnuEOnCVQtZEt6e_dIYs,638
35
36
  tests/test_config.py,sha256=nSmpinX2SmDNAprIcxs9UHdB0VakJB0snXaZmAoKJSc,12863
36
37
  tests/test_data.py,sha256=YdP1kZJivQ6yb9z96UK6oMDaOfJAl4YMJqzKvlCQaes,5744
37
- tests/test_downloader.py,sha256=Xr6-y3SjfTAo0JM2VDGoSx-nEtRj3BY-Q4JtSNFl3ro,2090
38
38
  tests/test_hardware_set.py,sha256=TcBh63rOxf1rKXxKlCPSnHueBFlz7rNP6BcoJjgVvPs,3065
39
39
  tests/test_imports_2and3.py,sha256=ehqpRYPVY7djBcb8OT_cnh86iCJJ9wuMWnfSR9RHxmY,507
40
40
  tests/test_package_environment.py,sha256=CdiC2PDVSnbcwTb4fsDTWqGYSzs1n5ca2KMoyISckGA,5893
@@ -45,8 +45,7 @@ tests/test_validator.py,sha256=2fY66ayNc08PGyj2vTI-V_1yeCWJDngkj2zkUM5TTCI,1526
45
45
  tests/mocks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
46
46
  tests/mocks/api_client_mock.py,sha256=Wfv2JPFSZfyHftVqsqxcpXWJn136pEHx26I_esz567E,943
47
47
  tests/mocks/glob.py,sha256=J2MH7nqi6NJOHuGdVWxhfeBd700_Ckj6cLh_8jSNkfg,215
48
- ciocore-6.3.1rc3.dist-info/METADATA,sha256=q6QZYoIeKZYL7oAHY4Ztb_kUzL4Xj6hfN5-65kYlAAw,15189
49
- ciocore-6.3.1rc3.dist-info/WHEEL,sha256=a-zpFRIJzOq5QfuhBzbhiA1eHTzNCJn8OdRvhdNX0Rk,110
50
- ciocore-6.3.1rc3.dist-info/entry_points.txt,sha256=cCqcALMYbC4d8545V9w0Zysfg9MVuKWhzDQ2er4UfGE,47
51
- ciocore-6.3.1rc3.dist-info/top_level.txt,sha256=SvlM5JlqULzAz00JZWfiUhfjhqDzYzSWssA87zdJl0o,14
52
- ciocore-6.3.1rc3.dist-info/RECORD,,
48
+ ciocore-6.4.0.dist-info/METADATA,sha256=ObyBNodNf9WIBCAKwIsWMLC5NAOfsCZhpLFdniY7-_0,14364
49
+ ciocore-6.4.0.dist-info/WHEEL,sha256=iYlv5fX357PQyRT2o6tw1bN-YcKFFHKqB_LwHO5wP-g,110
50
+ ciocore-6.4.0.dist-info/top_level.txt,sha256=SvlM5JlqULzAz00JZWfiUhfjhqDzYzSWssA87zdJl0o,14
51
+ ciocore-6.4.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: bdist_wheel (0.40.0)
2
+ Generator: bdist_wheel (0.41.2)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py2-none-any
5
5
  Tag: py3-none-any
ciocore/cli.py DELETED
@@ -1,300 +0,0 @@
1
- import os
2
- import sys
3
- import click
4
- import logging
5
-
6
- from ciocore import (
7
- loggeria,
8
- config,
9
- api_client,
10
- package_query,
11
- )
12
-
13
- from ciocore import __version__ as VERSION
14
-
15
- from ciocore.uploader import Uploader
16
- from ciocore.downloader import Downloader
17
-
18
- logger = logging.getLogger("conductor cli")
19
-
20
- LOG_LEVEL_HELP = """The logging level to display"""
21
-
22
- LOG_DIR_HELP = """
23
- Write a log file to the given directory. The log rotates, creating a new log file every day, while
24
- storing logs for the last 7 days. The log file is named conductor_ul.log
25
- """
26
-
27
- THREADS_HELP = """The number of threads that should download simultaneously"""
28
-
29
- UPLOADER_DATABASE_FILEPATH_HELP = (
30
- "Specify a filepath to the local md5 caching database."
31
- )
32
- LOCATION_HELP = """
33
- Specify a location tag to associate with uploads, downloads, and submissions. A location tag allows
34
- you to limit the scope of your uploads and downloads to jobs sharing the same location tag. This is
35
- useful while using the uploader or downloader in daemon mode.
36
- """
37
- UPLOADER_MD5_CACHING_HELP = """
38
- Use cached md5s. This can dramatically improve the uploading times, as md5 checking can be very
39
- time consuming. Caching md5s allows subsequent uploads (of the same files) to skip the md5
40
- generation process (if the files appear to not have been modified since the last time they were
41
- submitted). The cache is stored locally and uses a file's modification time and file size to
42
- intelligently guess whether the file has changed. Set this flag to False if there is concern that
43
- files may not be getting re-uploaded properly
44
- """
45
-
46
- UPLOADER_PATHS_HELP = """
47
- A list of paths to upload. Use quotes if paths contain spaces or special characters"""
48
-
49
- DOWNLOADER_JOB_ID_HELP = """
50
- The job id(s) to download. When specified will only download those jobs and terminate afterwards
51
- """
52
-
53
- DOWNLOADER_DEST_HELP = """
54
- Override the output directory"""
55
-
56
- PACKAGES_FORMAT_OPTIONS = ["text", "markdown", "html"]
57
- PACKAGES_FORMAT_HELP = """
58
- text: The output is a simple list of software names and versions, with nesting to indicate plugin
59
- compatibility. Output is sent to stdout.
60
-
61
- markdown: Designed for use in other markdown documentation systems where it benefits from consistent
62
- styling. Output is sent to stdout and can be piped to a file.
63
-
64
- html: Opens a browser window and displays the output in a simple html page.
65
- """
66
-
67
- LOG_FORMATTER = logging.Formatter(
68
- "%(asctime)s %(name)s%(levelname)9s %(threadName)s: %(message)s"
69
- )
70
-
71
-
72
- cfg = config.get()
73
- DEFAULT_CONFIG_MD5_CACHING = cfg["md5_caching"]
74
- DEFAULT_CONFIG_THREAD_COUNT = cfg["thread_count"]
75
- DEFAULT_CONFIG_LOG_LEVEL = cfg["log_level"]
76
-
77
- def _set_logging(log_level=None, log_filepath=None):
78
- level = loggeria.LEVEL_MAP.get(log_level)
79
- loggeria.setup_conductor_logging(
80
- logger_level=level,
81
- console_formatter=LOG_FORMATTER,
82
- file_formatter=LOG_FORMATTER,
83
- log_filepath=log_filepath,
84
- )
85
-
86
- def _register(client):
87
- api_client.ApiClient.register_client(
88
- client_name=client.CLIENT_NAME, client_version=VERSION
89
- )
90
-
91
- ########################### MAIN #################################
92
- @click.group(invoke_without_command=True)
93
- @click.pass_context
94
- @click.option('-v', '--version', is_flag=True, help='Print the version and exit.')
95
- def main(ctx, version):
96
- """Conductor Command-line interface."""
97
- if not ctx.invoked_subcommand:
98
- if version:
99
- click.echo(VERSION)
100
- ctx.exit()
101
- click.echo(ctx.get_help())
102
- ctx.exit()
103
-
104
- ############################# UPLOADER #############################
105
- @main.command()
106
- @click.option("-db", "--database_filepath", help=UPLOADER_DATABASE_FILEPATH_HELP)
107
- @click.option( "-md5", "--md5_caching", help=UPLOADER_MD5_CACHING_HELP, type=bool, default=DEFAULT_CONFIG_MD5_CACHING,)
108
- @click.option("-lv", "--log_level", help=LOG_LEVEL_HELP, type=click.Choice(choices=loggeria.LEVELS, case_sensitive=False), show_choices=True, default=DEFAULT_CONFIG_LOG_LEVEL)
109
- @click.option("-ld", "--log_dir", help=LOG_DIR_HELP)
110
- @click.option("-tc", "--thread_count", type=int, help=THREADS_HELP, default=DEFAULT_CONFIG_THREAD_COUNT)
111
- @click.option("-lc", "--location", help=LOCATION_HELP)
112
- @click.argument("paths", nargs=-1, type=click.Path(exists=True, resolve_path=True))
113
- def upload(
114
- database_filepath, location, md5_caching, log_level, log_dir, thread_count, paths
115
- ):
116
- """Upload files to Conductor.
117
-
118
- With no arguments, the uploader runs in daemon mode, watching for files to upload for submitted
119
- jobs.
120
-
121
- Alternatively, specify a list of paths to upload.
122
-
123
- Example:
124
- conductor upload file1 file2 file3
125
- """
126
- logfile = log_dir and os.path.join(log_dir, "conductor_ul.log")
127
- _set_logging(log_level, logfile)
128
-
129
- args_dict = {
130
- "database_filepath": database_filepath,
131
- "location": location,
132
- "md5_caching": md5_caching,
133
- "log_level": log_level,
134
- "log_dir": log_dir,
135
- "thread_count": thread_count,
136
- }
137
-
138
- up = Uploader(args_dict)
139
-
140
- if paths:
141
- up.assets_only(*paths)
142
- return
143
-
144
- up.main()
145
-
146
- ########################### DOWNLOADER #############################
147
- @main.command()
148
- @click.argument("jobids", nargs=-1)
149
- @click.option("-d", "--destination", help=DOWNLOADER_DEST_HELP)
150
- @click.option("-lv", "--log_level", help=LOG_LEVEL_HELP, type=click.Choice(choices=loggeria.LEVELS, case_sensitive=False), show_choices=True, default=DEFAULT_CONFIG_LOG_LEVEL)
151
- @click.option("-ld", "--log_dir", help=LOG_DIR_HELP)
152
- @click.option("-tc", "--thread_count", type=int, help=THREADS_HELP, default=DEFAULT_CONFIG_THREAD_COUNT)
153
- @click.option("-lc", "--location", help=LOCATION_HELP)
154
-
155
- def download(jobids, destination, location, log_level, log_dir, thread_count):
156
- """
157
- Download renders and other output files from Conductor. You can give a list of job ids to
158
- download, or you can omit jobids and the downloader will run in daemon mode.
159
-
160
- If you provide jobids, the default behavior is to download all the files from completed tasks
161
- for those jobs. You can however specify an explicit set of tasks to downloade by providing a
162
- task range spec after each job id. To do so, append a colon to the job id and then a compact
163
- task specification. See the examples.
164
-
165
- In daemon mode, the downloader polls for new jobs to download. You can specify a location tag to
166
- limit the scope of the downloader to only download jobs that were submitted with the same
167
- location tag.
168
-
169
- Examples:
170
- conductor download # daemon mode
171
- conductor download 1234 1235
172
- conductor download 1234:1-10
173
- conductor download 1234:1-5x2,10,12-14
174
- conductor download 1234:1-5 1235:5-10
175
- """
176
- logfile = log_dir and os.path.join(log_dir, "conductor_dl.log")
177
- _set_logging(log_level, logfile)
178
- _register(Downloader)
179
- if jobids:
180
- Downloader.download_jobs(
181
- jobids,
182
- thread_count=thread_count,
183
- output_dir=destination,
184
- )
185
- else:
186
- Downloader.start_daemon(
187
- thread_count=thread_count, location=location, output_dir=destination
188
- )
189
-
190
-
191
- ########################### PACKAGES #############################
192
- @main.command()
193
- @click.option(
194
- "-f",
195
- "--fmt",
196
- "--format",
197
- default="text",
198
- help=PACKAGES_FORMAT_HELP,
199
- type=click.Choice(choices=PACKAGES_FORMAT_OPTIONS, case_sensitive=False),
200
- )
201
- def packages(fmt):
202
- """List the softweare packages available on the render nodes in the cloud."""
203
- package_query.pq(format=fmt)
204
-
205
-
206
- ########################### DEPRECATIONS #############################
207
-
208
- ############################# UPLOADER #############################
209
- DEPRECATED_PATHS_HELP="Specify a list of paths to upload."
210
-
211
- @main.command(deprecated=True)
212
- @click.option("-db", "--database_filepath", help=UPLOADER_DATABASE_FILEPATH_HELP)
213
- @click.option( "-md5", "--md5_caching", help=UPLOADER_MD5_CACHING_HELP, type=bool, default=DEFAULT_CONFIG_MD5_CACHING,)
214
- @click.option("-lv", "--log_level", help=LOG_LEVEL_HELP, type=click.Choice(choices=loggeria.LEVELS, case_sensitive=False), show_choices=True, default=DEFAULT_CONFIG_LOG_LEVEL)
215
- @click.option("-ld", "--log_dir", help=LOG_DIR_HELP)
216
- @click.option("-tc", "--thread_count", type=int, help=THREADS_HELP, default=DEFAULT_CONFIG_THREAD_COUNT)
217
- @click.option("-lc", "--location", help=LOCATION_HELP)
218
- @click.option("-p", "--paths", help=DEPRECATED_PATHS_HELP, multiple=True, type=click.Path(exists=True, resolve_path=True))
219
-
220
-
221
-
222
- def uploader(
223
- database_filepath, location, md5_caching, log_level, log_dir, thread_count, paths
224
- ):
225
- """Upload files to Conductor.
226
-
227
- With no arguments, the uploader runs in daemon mode, watching for files to upload for submitted
228
- jobs.
229
-
230
- Alternatively, specify a list of paths to upload with the --paths option.
231
-
232
- Example:
233
- conductor upload file1 file2 file3
234
- """
235
- logfile = log_dir and os.path.join(log_dir, "conductor_ul.log")
236
- _set_logging(log_level, logfile)
237
-
238
- args_dict = {
239
- "database_filepath": database_filepath,
240
- "location": location,
241
- "md5_caching": md5_caching,
242
- "log_level": log_level,
243
- "log_dir": log_dir,
244
- "thread_count": thread_count,
245
- }
246
-
247
- up = Uploader(args_dict)
248
-
249
- if paths:
250
- up.assets_only(*paths)
251
- return
252
-
253
- up.main()
254
-
255
- ########################### DOWNLOADER #############################
256
- DEPRECATED_JOBID_HELP="Download all the files from completed tasks for the given jobs."
257
- DEPRECATED_TASKID_HELP="Download the files from the given tasks in the specified job."
258
-
259
- @main.command(deprecated=True)
260
- @click.option("-o", "--output", help=DOWNLOADER_DEST_HELP)
261
- @click.option("-j", "--job_id", help=DEPRECATED_JOBID_HELP)
262
- @click.option("-t", "--task_id", help=DEPRECATED_TASKID_HELP)
263
- @click.option("-lv", "--log_level", help=LOG_LEVEL_HELP, type=click.Choice(choices=loggeria.LEVELS, case_sensitive=False), show_choices=True, default=DEFAULT_CONFIG_LOG_LEVEL)
264
- @click.option("-ld", "--log_dir", help=LOG_DIR_HELP)
265
- @click.option("-tc", "--thread_count", type=int, help=THREADS_HELP, default=DEFAULT_CONFIG_THREAD_COUNT)
266
- @click.option("-lc", "--location", help=LOCATION_HELP)
267
- @click.pass_context
268
- def downloader(ctx, job_id, task_id, output, location, log_level, log_dir, thread_count):
269
- """
270
- Download renders and other output files from Conductor. You can specify a job id and optional task id to
271
- download, or you can omit all options and the downloader will run in daemon mode.
272
-
273
- In daemon mode, the downloader polls for new jobs to download. You can specify a location tag to
274
- limit the scope of the downloader to only download jobs that were submitted with the same
275
- location tag.
276
-
277
- Examples:
278
- conductor downloader # daemon mode
279
- conductor downloader --job_id --task_id 01234
280
- conductor downloader --task_id --task_id 123
281
- """
282
-
283
- logfile = log_dir and os.path.join(log_dir, "conductor_dl.log")
284
- _set_logging(log_level, logfile)
285
- _register(Downloader)
286
-
287
- if not job_id and not task_id:
288
- Downloader.start_daemon(
289
- thread_count=thread_count, location=location, output_dir=output
290
- )
291
- ctx.exit(0)
292
-
293
- if task_id:
294
- job_id = job_id + ":" + task_id
295
-
296
- Downloader.download_jobs(
297
- (job_id,),
298
- thread_count=thread_count,
299
- output_dir=output,
300
- )
ciocore/package_query.py DELETED
@@ -1,171 +0,0 @@
1
- """
2
- Generate markdown from the software packages list.
3
- """
4
- import os
5
- import sys
6
- import json
7
- from ciocore.package_tree import PackageTree
8
- from ciocore import api_client
9
- import markdown
10
- import io
11
- import tempfile
12
- import webbrowser
13
-
14
- PURE = """
15
- <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/purecss@3.0.0/build/pure-min.css" integrity="sha384-X38yfunGUhNzHpBaEBsWLO+A0HDYOQi8ufWDkZ0k9e0eXz/tH3II7uKZ9msv++Ls" crossorigin="anonymous">
16
- <meta name="viewport" content="width=device-width, initial-scale=1">
17
- """
18
-
19
- def green(rhs):
20
- return "\033[92m{}\033[0m".format(rhs)
21
-
22
-
23
- def red(rhs):
24
- return "\033[91m{}\033[0m".format(rhs)
25
-
26
-
27
- def blue(rhs):
28
- return "\033[94m{}\033[0m".format(rhs)
29
-
30
-
31
- def magenta(rhs):
32
- return "\033[95m{}\033[0m".format(rhs)
33
-
34
-
35
- def raw(rhs, stream):
36
- stream.write("{}\n\n".format(rhs))
37
-
38
-
39
- def d(n, rhs, stream):
40
- """Indent with dashes"""
41
- stream.write("{} {}\n".format("-" * n, rhs))
42
-
43
-
44
- def hr(stream):
45
- stream.write("---\n\n")
46
-
47
-
48
- def h(n, rhs, stream):
49
- stream.write("{} {}\n\n".format("#" * n, rhs))
50
-
51
-
52
- def plugin_table_header(stream):
53
- stream.write(
54
- '|<div style="width:150px">Plugin</div> |<div style="min-width:400px">Versions</div>|\n|:------------|:-------------|\n'
55
- )
56
-
57
-
58
- def plugin_table_row(plugin, versions, stream):
59
- stream.write("|{}|{}|\n".format(plugin, versions))
60
-
61
-
62
- def write_markdown(hostnames, tree_data, platform, stream):
63
- """
64
- Write the tree of packages in Markdown.
65
-
66
- Use this to generate docs for the Conductor mkdocs site.
67
- """
68
- if not hostnames:
69
- return
70
- h(2, "{} Software".format(platform.capitalize()), stream)
71
- last_hostgroup = None
72
- for hostname in hostnames:
73
- display_hostname = " ".join(hostname.split()[:2])
74
- hostgroup = hostname.split(" ")[0]
75
- stream.write("\n")
76
- if not hostgroup == last_hostgroup:
77
- hr(stream)
78
- h(3, hostgroup, stream)
79
- h(4, display_hostname, stream)
80
- last_hostgroup = hostgroup
81
- plugins = tree_data.supported_plugins(hostname)
82
- if plugins:
83
- plugin_table_header(stream)
84
- for plugin in plugins:
85
- plugin_table_row(
86
- plugin["plugin"], ", ".join(plugin["versions"]), stream
87
- )
88
-
89
-
90
- def write_text(hostnames, tree_data, platform, color_func, stream):
91
- """
92
- Write the tree of packages as text.
93
-
94
- Products are indented with one dash.
95
- Host packages are indented with two dashes.
96
- Plugin packages are indented with three dashes.
97
- """
98
- if not hostnames:
99
- d(0, red("There are no '{}' host packages".format(platform)), stream)
100
- return
101
- d(0, "{} Software".format(platform).upper(), stream)
102
- last_hostgroup = None
103
- for hostname in hostnames:
104
- display_hostname = " ".join(hostname.split()[:2])
105
- hostgroup = hostname.split(" ")[0]
106
- if not hostgroup == last_hostgroup:
107
- d(0, green("-" * 30), stream)
108
- d(1, color_func(hostgroup), stream)
109
- d(2, color_func(display_hostname), stream)
110
- last_hostgroup = hostgroup
111
- plugins = tree_data.supported_plugins(hostname)
112
- if plugins:
113
- for plugin in plugins:
114
- d(
115
- 3,
116
- color_func(
117
- "{} [{}]".format(
118
- plugin["plugin"], ", ".join(plugin["versions"])
119
- )
120
- ),
121
- stream,
122
- )
123
-
124
-
125
- def pq(format="text"):
126
- packages = api_client.request_software_packages()
127
-
128
- tree_data = PackageTree(packages)
129
-
130
- hostnames = tree_data.supported_host_names()
131
- linux_hostnames = [h for h in hostnames if h.endswith("linux")]
132
- windows_hostnames = [h for h in hostnames if h.endswith("windows")]
133
-
134
- if format == "markdown":
135
- stream = sys.stdout
136
- raw(
137
- "This page contains the complete list of software available at Conductor. If you require applications or plugins that are not in the list, please [create a support ticket](https://support.conductortech.com/hc/en-us/requests/new) and let us know.",
138
- stream,
139
- )
140
- write_markdown(linux_hostnames, tree_data, "linux", stream)
141
- write_markdown(windows_hostnames, tree_data, "windows", stream)
142
- elif format == "text":
143
- stream = sys.stdout
144
- write_text(linux_hostnames, tree_data, "linux", magenta, stream)
145
- d(0, "", stream)
146
- write_text(windows_hostnames, tree_data, "windows", blue, stream)
147
- elif format == "html":
148
- stream = io.StringIO()
149
- raw(
150
- "This page contains the complete list of software available at Conductor. If you require applications or plugins that are not in the list, please [create a support ticket](https://support.conductortech.com/hc/en-us/requests/new) and let us know.",
151
- stream,
152
- )
153
-
154
- write_markdown(linux_hostnames, tree_data, "linux", stream)
155
- write_markdown(windows_hostnames, tree_data, "windows", stream)
156
-
157
- html = markdown.markdown(
158
- stream.getvalue(), extensions=["markdown.extensions.tables"]
159
- )
160
-
161
- html = decorate(html)
162
-
163
- stream.close()
164
- with tempfile.NamedTemporaryFile(mode="w", suffix=".html", delete=False) as f:
165
- f.write(html)
166
- webbrowser.open("file://" + f.name, new=2)
167
-
168
- def decorate(html):
169
- html = html.replace("<table>", '<table class="pure-table pure-table-bordered">')
170
- html = '<html><head>{}</head><body style="margin: 2em;">{}</body></html>'.format(PURE, html)
171
- return html
@@ -1,2 +0,0 @@
1
- [console_scripts]
2
- conductor = ciocore.cli:main
tests/test_cli.py DELETED
@@ -1,161 +0,0 @@
1
- from click.testing import CliRunner
2
- from ciocore.cli import upload as cli_upload
3
- from ciocore.cli import download as cli_download
4
- import unittest
5
- import os
6
- from unittest import mock
7
-
8
-
9
- class CliTestUploaderOptions(unittest.TestCase):
10
- def setUp(self):
11
- self.runner = CliRunner()
12
- self.default_args = {
13
- "database_filepath": None,
14
- "location": None,
15
- "md5_caching": True,
16
- "log_level": "INFO",
17
- "log_dir": None,
18
- "thread_count": 16,
19
- }
20
- init_patcher = mock.patch("ciocore.cli.Uploader.__init__", autospec=True)
21
- self.mock_init = init_patcher.start()
22
- self.addCleanup(init_patcher.stop)
23
-
24
- def test_receives_full_args_dict_with_defaults_when_no_args_given(self):
25
- self.runner.invoke(cli_upload, [])
26
- self.mock_init.assert_called_once_with(mock.ANY, self.default_args)
27
-
28
- def test_database_filepath_arg(self):
29
- self.runner.invoke(cli_upload, ["--database_filepath", "foo"])
30
- expected = self.default_args
31
- expected.update({"database_filepath": "foo"})
32
- self.mock_init.assert_called_once_with(mock.ANY, expected)
33
-
34
- def test_location_arg(self):
35
- self.runner.invoke(cli_upload, ["--location", "foo"])
36
- expected = self.default_args
37
- expected.update({"location": "foo"})
38
- self.mock_init.assert_called_once_with(mock.ANY, expected)
39
-
40
- def test_md5_caching_arg(self):
41
- self.runner.invoke(cli_upload, ["--md5_caching", False])
42
- expected = self.default_args
43
- expected.update({"md5_caching": False})
44
- self.mock_init.assert_called_once_with(mock.ANY, expected)
45
-
46
- def test_log_level_arg(self):
47
- self.runner.invoke(cli_upload, ["--log_level", "DEBUG"])
48
- expected = self.default_args
49
- expected.update({"log_level": "DEBUG"})
50
- self.mock_init.assert_called_once_with(mock.ANY, expected)
51
-
52
- def test_log_dir_arg(self):
53
- self.runner.invoke(cli_upload, ["--log_dir", "foo"])
54
- expected = self.default_args
55
- expected.update({"log_dir": "foo"})
56
- self.mock_init.assert_called_once_with(mock.ANY, expected)
57
-
58
- def test_thread_count_arg(self):
59
- self.runner.invoke(cli_upload, ["--thread_count", 4])
60
- expected = self.default_args
61
- expected.update({"thread_count": 4})
62
- self.mock_init.assert_called_once_with(mock.ANY, expected)
63
-
64
-
65
- class CliTestUploaderArguments(unittest.TestCase):
66
- def setUp(self):
67
- self.runner = CliRunner()
68
-
69
- uploader_patcher = mock.patch("ciocore.cli.Uploader", autospec=True)
70
- self.mock_uploader = uploader_patcher.start()
71
- self.mock_inst = self.mock_uploader.return_value
72
- self.addCleanup(uploader_patcher.stop)
73
-
74
- def test_path_only_branch_if_paths(self):
75
- with self.runner.isolated_filesystem():
76
- filenames = ["foo.txt", "bar.txt", "baz.txt", "qux.txt"]
77
- filenames = [os.path.join(os.getcwd(), filename) for filename in filenames]
78
- for filename in filenames:
79
- with open(filename, "w") as f:
80
- f.write("hello world")
81
- self.runner.invoke(cli_upload, filenames)
82
- self.mock_inst.assets_only.assert_called_once_with(
83
- mock.ANY, mock.ANY, mock.ANY, mock.ANY
84
- )
85
- self.mock_inst.main.assert_not_called()
86
-
87
- def test_main_branch_if_no_paths(self):
88
- with self.runner.isolated_filesystem():
89
- self.runner.invoke(cli_upload)
90
- self.mock_inst.main.assert_called_once()
91
- self.mock_inst.assets_only.assert_not_called()
92
-
93
-
94
- class CliTestDownloader(unittest.TestCase):
95
- def setUp(self):
96
- self.runner = CliRunner()
97
-
98
- dljobs_patcher = mock.patch(
99
- "ciocore.cli.Downloader.download_jobs", autospec=True
100
- )
101
- self.mock_dljobs = dljobs_patcher.start()
102
- self.addCleanup(dljobs_patcher.stop)
103
-
104
- start_daemon_patcher = mock.patch(
105
- "ciocore.cli.Downloader.start_daemon", autospec=True
106
- )
107
- self.mock_start_daemon = start_daemon_patcher.start()
108
- self.addCleanup(start_daemon_patcher.stop)
109
-
110
- def test_jobid_branch_if_job_id(self):
111
- jid = "00000"
112
- self.runner.invoke(cli_download, [jid])
113
- self.mock_dljobs.assert_called_once()
114
- self.mock_start_daemon.assert_not_called()
115
-
116
- def test_job_id_only(self):
117
- jid = "00000"
118
- self.runner.invoke(cli_download, [jid])
119
- self.mock_dljobs.assert_called_once_with(
120
- (jid,), thread_count=mock.ANY, output_dir=mock.ANY
121
- )
122
-
123
- def test_several_job_ids(self):
124
- jid1 = "00000"
125
- jid2 = "11111"
126
- self.runner.invoke(cli_download, [jid1, jid2])
127
- self.mock_dljobs.assert_called_once_with(
128
- (jid1,jid2), thread_count=mock.ANY, output_dir=mock.ANY
129
- )
130
-
131
- def test_job_ids_and_others(self):
132
- jid1 = "00000"
133
- jid2 = "11111"
134
- tc = 4
135
- od = "foo"
136
- self.runner.invoke(
137
- cli_download,
138
- ["--thread_count", tc, "--destination", od, jid1, jid2 ],
139
- )
140
- self.mock_dljobs.assert_called_once_with(
141
- (jid1,jid2), thread_count=tc, output_dir=od
142
- )
143
-
144
- def test_daemon_branch_if_no_job_id(self):
145
- self.runner.invoke(cli_download, [])
146
- self.mock_start_daemon.assert_called_once()
147
- self.mock_dljobs.assert_not_called()
148
-
149
- def test_daemon_branch_args_present(self):
150
- tc = 4
151
- od = "foo"
152
- self.runner.invoke(cli_download)
153
- self.mock_start_daemon.assert_called_once_with(thread_count=mock.ANY, location=mock.ANY, output_dir=mock.ANY)
154
-
155
- def test_daemon_branch_args(self):
156
- tc = 4
157
- od = "foo"
158
- loc = "bar"
159
- self.runner.invoke(cli_download, ["--thread_count", tc, "--destination", od, "--location", loc])
160
- self.mock_start_daemon.assert_called_once_with(thread_count=tc, location="bar", output_dir=od)
161
-
tests/test_downloader.py DELETED
@@ -1,56 +0,0 @@
1
-
2
- from ciocore.downloader import Downloader
3
- import unittest
4
- from unittest import mock
5
-
6
- class TestDownloaderFlatten(unittest.TestCase):
7
-
8
- def test_flatten_single_job_id(self):
9
- input = ("01234",)
10
- result = Downloader._flatten(input)
11
- self.assertEqual(result, [{"job_id": "01234", "tasks":None}])
12
-
13
- def test_flatten_pad_job_id(self):
14
- input = ("1234",)
15
- result = Downloader._flatten(input)
16
- self.assertEqual(result, [{"job_id": "01234", "tasks":None}])
17
-
18
- def test_several_job_ids(self):
19
- input = ("1234","1235","1236")
20
- result = Downloader._flatten(input)
21
- self.assertEqual(result, [
22
- {"job_id": "01234", "tasks":None},
23
- {"job_id": "01235", "tasks":None},
24
- {"job_id": "01236", "tasks":None}
25
- ])
26
-
27
- def test_job_and_tasks(self):
28
- input = ("1234:1-7x2,10",)
29
- result = Downloader._flatten(input)
30
- self.assertEqual(result, [{"job_id": "01234", "tasks":["001","003","005","007","010"]}])
31
-
32
- def test_several_job_and_tasks(self):
33
- input = ("1234:1-7x2,10","1235:12-15")
34
- result = Downloader._flatten(input)
35
- self.assertEqual(result, [
36
- {"job_id": "01234", "tasks":["001","003","005","007","010"]},
37
- {"job_id": "01235", "tasks":["012","013","014","015"]}
38
- ])
39
-
40
- def test_mix_job_and_job_with_tasks(self):
41
- input = ("1234","1235:12-15")
42
- result = Downloader._flatten(input)
43
- self.assertEqual(result, [
44
- {"job_id": "01234", "tasks":None},
45
- {"job_id": "01235", "tasks":["012","013","014","015"]}
46
- ])
47
-
48
- def test_invalid_range_downloads_whole_job(self):
49
- # Someone might have a bunch of stuff queued up and made a mistake and left for the night.
50
- # We should download the whole job in this case so they don't have to restart the dl in the
51
- # morning.
52
- input = ("1234:badrange",)
53
- result = Downloader._flatten(input)
54
- self.assertEqual(result, [
55
- {"job_id": "01234", "tasks":None}
56
- ])