flux-batch 0.0.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2022-2023 LLNS, LLC and other HPCIC DevTools Developers.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -0,0 +1,6 @@
+ recursive-include flux_batch *
+ prune env*
+ global-exclude .env
+ global-exclude *.py[co]
+ recursive-exclude .git *
+ global-exclude __pycache__
@@ -0,0 +1,21 @@
+ This work was produced under the auspices of the U.S. Department of
+ Energy by Lawrence Livermore National Laboratory under Contract
+ DE-AC52-07NA27344.
+
+ This work was prepared as an account of work sponsored by an agency of
+ the United States Government. Neither the United States Government nor
+ Lawrence Livermore National Security, LLC, nor any of their employees
+ makes any warranty, expressed or implied, or assumes any legal liability
+ or responsibility for the accuracy, completeness, or usefulness of any
+ information, apparatus, product, or process disclosed, or represents that
+ its use would not infringe privately owned rights.
+
+ Reference herein to any specific commercial product, process, or service
+ by trade name, trademark, manufacturer, or otherwise does not necessarily
+ constitute or imply its endorsement, recommendation, or favoring by the
+ United States Government or Lawrence Livermore National Security, LLC.
+
+ The views and opinions of authors expressed herein do not necessarily
+ state or reflect those of the United States Government or Lawrence
+ Livermore National Security, LLC, and shall not be used for advertising
+ or product endorsement purposes.
@@ -0,0 +1,153 @@
+ Metadata-Version: 2.1
+ Name: flux-batch
+ Version: 0.0.0
+ Summary: Python SDK for flux batch jobs and services
+ Home-page: https://github.com/converged-computing/flux-batch
+ Author: Vanessa Sochat
+ Author-email: vsoch@users.noreply.github.com
+ Maintainer: Vanessa Sochat
+ License: LICENSE
+ Keywords: flux,flux framework,hpc,batch,workloads
+ Classifier: Intended Audience :: Science/Research
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: C
+ Classifier: Programming Language :: Python
+ Classifier: Topic :: Software Development
+ Classifier: Topic :: Scientific/Engineering
+ Classifier: Operating System :: Unix
+ Classifier: Programming Language :: Python :: 3.11
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ License-File: NOTICE
+ Requires-Dist: pyyaml
+ Requires-Dist: ply
+ Provides-Extra: all
+ Requires-Dist: pyyaml; extra == "all"
+ Requires-Dist: ply; extra == "all"
+ Requires-Dist: pytest>=4.6.2; extra == "all"
+
+ # flux-batch
+
+ > Python SDK to generate Flux batch jobs and services
+
+ [![PyPI version](https://badge.fury.io/py/flux-batch.svg)](https://badge.fury.io/py/flux-batch)
+
+ ![https://github.com/converged-computing/flux-batch/raw/main/img/flux-batch-small.png](https://github.com/converged-computing/flux-batch/raw/main/img/flux-batch-small.png)
+
+ ## Related Projects
+
+ - [flux-mcp](https://github.com/converged-computing/flux-mcp): MCP functions for Flux.
+ - [flux-mcp-server](https://github.com/converged-computing/flux-mcp-server): MCP server.
+ - [fractale-mcp](https://github.com/compspec/fractale-mcp): (fractale) MCP orchestration (agents, databases, UI interfaces).
+ - [hpc-mcp](https://github.com/converged-computing/hpc-mcp): HPC tools for a larger set of HPC and converged computing use cases.
+
+ ## Services
+
+ - **flux-scribe**: Write job events to a local sqlite database via the JournalConsumer (written but not added yet; still needs testing).
+
+ ## Usage
+
+ This is a small Flux utility that makes it easy to create Flux batch jobs and services.
+ The use case is to submit work (one or more jobs) under an instance and run a custom service, or prolog and epilog commands. We will provision several services here, and you can also provide the name of your own service to start and stop.
+
+ ### Setup
+
+ Install the library and start (or be inside) a Flux instance.
+
+ ```bash
+ flux start
+ pip install -e . --break-system-packages
+ ```
+
+ ### Example
+
+ Run the controlled example to see a batch job with a prolog and epilog run and complete:
+
+ ```bash
+ python3 ./tests/test_flux_batch.py
+ ```
+ ```console
+ Flux Batch Module Test
+ [OK] Connected to Flux.
+ [*] Creating batch jobs...
+ [*] Mapping attributes to BatchJobspecV1...
+ [*] Previewing submission (Dryrun -> Wrapper)...
+ #!/bin/bash
+ echo 'Batch Wrapper Starting'
+ flux submit --wait /bin/echo 'Job 1 starting'
+ flux submit --wait /bin/sleep 5
+ flux submit --wait /bin/echo 'Job 2 finished'
+ flux job wait --all
+ echo 'Batch Wrapper Finished'
+ [*] Performing submission (Dryrun -> Wrapper -> Submit)...
+ [SUCCESS] Batch submitted! Flux Job ID: ƒMX29AwFu
+ ```
+ ```bash
+ $ flux jobs -a
+ JOBID USER NAME ST NTASKS NNODES TIME INFO
+ ƒMX29AwFu vscode test-batch R 1 1 4.213s 68e8c4399c15
+ ```
+ ```bash
+ $ flux jobs -a
+ JOBID USER NAME ST NTASKS NNODES TIME INFO
+ ƒMX29AwFu vscode test-batch CD 1 1 6.354s 68e8c4399c15
+ ```
+
+ Here is an explicit (manual) example that does the same:
+
+ ```python
+ import flux
+ import flux_batch
+
+ # for pretty printing
+ # from rich import print
+
+ handle = flux.Flux()
+
+ # Create your batch job with some number of commands
+ batch = flux_batch.BatchJobV1()
+ batch.add_job(["echo", "Job 1 starting"])
+ batch.add_job(["sleep", "5"])
+ batch.add_job(["echo", "Job 2 finished"])
+
+ # Wrap it up into a jobspec
+ jobspec = flux_batch.BatchJobspecV1.from_jobs(
+     batch,
+     nodes=1,
+     nslots=1,
+     time_limit="10m",
+     job_name="test-batch"
+ )
+
+ # Add a prolog and epilog
+ jobspec.add_prolog("echo 'Batch Wrapper Starting'")
+ jobspec.add_epilog("echo 'Batch Wrapper Finished'")
+
+ # Add a service (this assumes a user-level service that exists)
+ # jobspec.add_service("my-service")
+
+ # Preview it
+ print(flux_batch.submit(handle, jobspec, dry_run=True))
+
+ # Submit that bad boi.
+ jobid = flux_batch.submit(handle, jobspec)
+ ```
+
+ ## TODO
+
+ - Option for controlled output (that we can easily retrieve afterwards)
+ - Create the flux-scribe service and add an example (needs testing on a cluster with systemctl)
+
+ ## License
+
+ HPCIC DevTools is distributed under the terms of the MIT license.
+ All new contributions must be made under this license.
+
+ See [LICENSE](https://github.com/converged-computing/cloud-select/blob/main/LICENSE),
+ [COPYRIGHT](https://github.com/converged-computing/cloud-select/blob/main/COPYRIGHT), and
+ [NOTICE](https://github.com/converged-computing/cloud-select/blob/main/NOTICE) for details.
+
+ SPDX-License-Identifier: (MIT)
+
+ LLNL-CODE-842614
@@ -0,0 +1,124 @@
+ # flux-batch
+
+ > Python SDK to generate Flux batch jobs and services
+
+ [![PyPI version](https://badge.fury.io/py/flux-batch.svg)](https://badge.fury.io/py/flux-batch)
+
+ ![https://github.com/converged-computing/flux-batch/raw/main/img/flux-batch-small.png](https://github.com/converged-computing/flux-batch/raw/main/img/flux-batch-small.png)
+
+ ## Related Projects
+
+ - [flux-mcp](https://github.com/converged-computing/flux-mcp): MCP functions for Flux.
+ - [flux-mcp-server](https://github.com/converged-computing/flux-mcp-server): MCP server.
+ - [fractale-mcp](https://github.com/compspec/fractale-mcp): (fractale) MCP orchestration (agents, databases, UI interfaces).
+ - [hpc-mcp](https://github.com/converged-computing/hpc-mcp): HPC tools for a larger set of HPC and converged computing use cases.
+
+ ## Services
+
+ - **flux-scribe**: Write job events to a local sqlite database via the JournalConsumer (written but not added yet; still needs testing). A rough sketch of the idea follows below.
+
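+ Since flux-scribe is not shipped yet, here is a rough sketch of the idea. This is an illustration only, not the actual service: it assumes flux-core's `JournalConsumer` interface (`start()` / `poll()` returning events with `jobid`, `name`, and `timestamp` attributes), which may vary across flux-core versions, and the table schema is invented for the example.
+
+ ```python
+ # Rough sketch only (not the real flux-scribe): consume job events from
+ # the Flux journal and append them to a local sqlite database.
+ import sqlite3
+
+ import flux
+ from flux.job import JournalConsumer  # assumed import path; check your flux-core version
+
+ db = sqlite3.connect("jobs.db")
+ db.execute("CREATE TABLE IF NOT EXISTS events (jobid TEXT, name TEXT, timestamp REAL)")
+
+ consumer = JournalConsumer(flux.Flux()).start()
+ while True:
+     event = consumer.poll(timeout=-1)
+     if event is None:
+         break
+     db.execute(
+         "INSERT INTO events VALUES (?, ?, ?)",
+         (str(event.jobid), event.name, event.timestamp),
+     )
+     db.commit()
+ ```
+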
+ ## Usage
+
+ This is a small Flux utility that makes it easy to create Flux batch jobs and services.
+ The use case is to submit work (one or more jobs) under an instance and run a custom service, or prolog and epilog commands. We will provision several services here, and you can also provide the name of your own service to start and stop.
+
+ ### Setup
+
+ Install the library and start (or be inside) a Flux instance.
+
+ ```bash
+ flux start
+ pip install -e . --break-system-packages
+ ```
+
+ ### Example
+
+ Run the controlled example to see a batch job with a prolog and epilog run and complete:
+
+ ```bash
+ python3 ./tests/test_flux_batch.py
+ ```
+ ```console
+ Flux Batch Module Test
+ [OK] Connected to Flux.
+ [*] Creating batch jobs...
+ [*] Mapping attributes to BatchJobspecV1...
+ [*] Previewing submission (Dryrun -> Wrapper)...
+ #!/bin/bash
+ echo 'Batch Wrapper Starting'
+ flux submit --wait /bin/echo 'Job 1 starting'
+ flux submit --wait /bin/sleep 5
+ flux submit --wait /bin/echo 'Job 2 finished'
+ flux job wait --all
+ echo 'Batch Wrapper Finished'
+ [*] Performing submission (Dryrun -> Wrapper -> Submit)...
+ [SUCCESS] Batch submitted! Flux Job ID: ƒMX29AwFu
+ ```
+ ```bash
+ $ flux jobs -a
+ JOBID USER NAME ST NTASKS NNODES TIME INFO
+ ƒMX29AwFu vscode test-batch R 1 1 4.213s 68e8c4399c15
+ ```
+ ```bash
+ $ flux jobs -a
+ JOBID USER NAME ST NTASKS NNODES TIME INFO
+ ƒMX29AwFu vscode test-batch CD 1 1 6.354s 68e8c4399c15
+ ```
+
+ Here is an explicit (manual) example that does the same:
+
+ ```python
+ import flux
+ import flux_batch
+
+ # for pretty printing
+ # from rich import print
+
+ handle = flux.Flux()
+
+ # Create your batch job with some number of commands
+ batch = flux_batch.BatchJobV1()
+ batch.add_job(["echo", "Job 1 starting"])
+ batch.add_job(["sleep", "5"])
+ batch.add_job(["echo", "Job 2 finished"])
+
+ # Wrap it up into a jobspec
+ jobspec = flux_batch.BatchJobspecV1.from_jobs(
+     batch,
+     nodes=1,
+     nslots=1,
+     time_limit="10m",
+     job_name="test-batch"
+ )
+
+ # Add a prolog and epilog
+ jobspec.add_prolog("echo 'Batch Wrapper Starting'")
+ jobspec.add_epilog("echo 'Batch Wrapper Finished'")
+
+ # Add a service (this assumes a user-level service that exists)
+ # jobspec.add_service("my-service")
+
+ # Preview it
+ print(flux_batch.submit(handle, jobspec, dry_run=True))
+
+ # Submit that bad boi.
+ jobid = flux_batch.submit(handle, jobspec)
+ ```
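+
+ If a service is added, the generated wrapper brackets the jobs with `systemctl --user start` and `stop` lines (see `generate_wrapper_script` in `jobspec.py`). A quick way to check exactly what will run, shown here with the hypothetical `my-service` from the commented line above, is to print the wrapper directly:
+
+ ```python
+ # Preview the generated wrapper script without submitting anything.
+ # "my-service" is a placeholder user-level systemd service.
+ jobspec.add_service("my-service")
+ print(jobspec.generate_wrapper_script())
+ # #!/bin/bash
+ # echo 'Batch Wrapper Starting'
+ # systemctl --user start my-service
+ # flux submit --wait ...   (one line per job)
+ # flux job wait --all
+ # systemctl --user stop my-service
+ # echo 'Batch Wrapper Finished'
+ ```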
+
+ ## TODO
+
+ - Option for controlled output (that we can easily retrieve afterwards)
+ - Create the flux-scribe service and add an example (needs testing on a cluster with systemctl)
+
+ ## License
+
+ HPCIC DevTools is distributed under the terms of the MIT license.
+ All new contributions must be made under this license.
+
+ See [LICENSE](https://github.com/converged-computing/cloud-select/blob/main/LICENSE),
+ [COPYRIGHT](https://github.com/converged-computing/cloud-select/blob/main/COPYRIGHT), and
+ [NOTICE](https://github.com/converged-computing/cloud-select/blob/main/NOTICE) for details.
+
+ SPDX-License-Identifier: (MIT)
+
+ LLNL-CODE-842614
@@ -0,0 +1,7 @@
+ from .jobspec import BatchJobspecV1
+ from .models import BatchAttributesV1, BatchJobV1
+ from .submit import submit
+
+ __all__ = ["BatchJobV1", "BatchAttributesV1", "BatchJobspecV1", "submit"]
+
+ from .version import __version__ # noqa
@@ -0,0 +1,132 @@
+ import shlex
+ from typing import List
+
+ import flux_batch.models as models
+
+
+ class BatchJobspecV1:
+     """
+     A BatchJobspecV1 mirrors a JobspecV1. We need to:
+
+     1. Add some number of commands or a script
+     2. Add optional services (start/stop)
+     """
+
+     def __init__(self, attributes: models.BatchAttributesV1 = None):
+         self.attributes = attributes or models.BatchAttributesV1()
+         self.commands: List[str] = []
+         self.prologs: List[str] = []
+         self.epilogs: List[str] = []
+         self.services: List[str] = []
+
+     @classmethod
+     def from_command(cls, command: List[str], **kwargs):
+         inst = cls(models.BatchAttributesV1(**kwargs))
+         inst.commands = [shlex.join(command)]
+         return inst
+
+     @classmethod
+     def from_jobs(cls, batch: models.BatchJobV1, **kwargs):
+         """
+         Generate the batch script from a set of jobs.
+
+         With more than one job, we assume we are waiting.
+         """
+         inst = cls(models.BatchAttributesV1(**kwargs))
+         if len(batch.jobs) > 1:
+             for job_str in batch.jobs:
+                 inst.commands.append(f"flux submit --wait {job_str}")
+             # Assume we want to wait for all jobs
+             inst.commands.append("flux job wait --all")
+         else:
+             inst.commands = batch.jobs
+         return inst
+
+     def add_service(self, service: str):
+         self.services.append(service)
+
+     def add_prolog(self, cmd: str):
+         self.prologs.append(cmd)
+
+     def add_epilog(self, cmd: str):
+         self.epilogs.append(cmd)
+
+     def get_cli_flags(self) -> List[str]:
+         """
+         Converts BatchAttributesV1 into a list of strings for subprocess.
+         """
+         flags = []
+         attr = self.attributes
+
+         # Mapping table for simple flags
+         mapping = {
+             "nslots": "-n",
+             "cores_per_slot": "-c",
+             "gpus_per_slot": "-g",
+             "nodes": "-N",
+             "bank": "-B",
+             "queue": "-q",
+             "time_limit": "-t",
+             "urgency": "--urgency",
+             "job_name": "--job-name",
+             "cwd": "--cwd",
+             "dependency": "--dependency",
+             "requires": "--requires",
+             "begin_time": "--begin-time",
+             "signal": "--signal",
+             "broker_opts": "--broker-opts",
+             "dump": "--dump",
+             "flags": "--flags",
+         }
+
+         for field_name, flag in mapping.items():
+             val = getattr(attr, field_name)
+             if val is not None:
+                 flags.extend([flag, str(val)])
+
+         # Boolean flags
+         if attr.exclusive:
+             flags.append("-x")
+         if attr.unbuffered:
+             flags.append("-u")
+         if attr.wrap:
+             flags.append("--wrap")
+
+         # Multi-use flags
+         multi_mapping = {
+             "setopt": "-o",
+             "setattr": "-S",
+             "add_file": "--add-file",
+             "env": "--env",
+             "env_remove": "--env-remove",
+             "env_file": "--env-file",
+             "rlimit": "--rlimit",
+             "conf": "--conf",
+         }
+         for field_name, flag in multi_mapping.items():
+             for val in getattr(attr, field_name):
+                 flags.extend([flag, str(val)])
+
+         return flags
+
+     def generate_wrapper_script(self) -> str:
+         """
+         Generate the wrapper script.
+
+         1. Start with hashbang!
+         2. Add prologs
+         3. Add services start
+         4. Add jobs/commands
+         5. Stop services
+         6. And epilogs
+         """
+
+         lines = ["#!/bin/bash"]
+         lines.extend(self.prologs)
+         for s in self.services:
+             lines.append(f"systemctl --user start {s}")
+         lines.extend(self.commands)
+         for s in reversed(self.services):
+             lines.append(f"systemctl --user stop {s}")
+         lines.extend(self.epilogs)
+         return "\n".join(lines)
@@ -0,0 +1 @@
+ from .logger import LogColors, logger, setup_logger
@@ -0,0 +1,208 @@
+ from random import choice
+
+
+ class JobNamer:
+     _descriptors = [
+         "chunky",
+         "buttery",
+         "delicious",
+         "scruptious",
+         "dinosaur",
+         "boopy",
+         "lovely",
+         "carnivorous",
+         "hanky",
+         "loopy",
+         "doopy",
+         "astute",
+         "gloopy",
+         "outstanding",
+         "stinky",
+         "conspicuous",
+         "fugly",
+         "frigid",
+         "angry",
+         "adorable",
+         "sticky",
+         "moolicious",
+         "cowy",
+         "spicy",
+         "grated",
+         "crusty",
+         "stanky",
+         "blank",
+         "bumfuzzled",
+         "fuzzy",
+         "hairy",
+         "peachy",
+         "tart",
+         "creamy",
+         "arid",
+         "strawberry",
+         "butterscotch",
+         "wobbly",
+         "persnickety",
+         "nerdy",
+         "dirty",
+         "placid",
+         "bloated",
+         "swampy",
+         "pusheena",
+         "hello",
+         "goodbye",
+         "milky",
+         "purple",
+         "rainbow",
+         "bricky",
+         "muffled",
+         "anxious",
+         "misunderstood",
+         "eccentric",
+         "quirky",
+         "lovable",
+         "reclusive",
+         "faux",
+         "evasive",
+         "confused",
+         "crunchy",
+         "expensive",
+         "ornery",
+         "fat",
+         "phat",
+         "joyous",
+         "expressive",
+         "psycho",
+         "chocolate",
+         "salted",
+         "gassy",
+         "red",
+         "blue",
+     ]
+
+     _nouns = [
+         "squidward",
+         "hippo",
+         "butter",
+         "animal",
+         "peas",
+         "lettuce",
+         "carrot",
+         "onion",
+         "peanut",
+         "cupcake",
+         "muffin",
+         "buttface",
+         "leopard",
+         "parrot",
+         "parsnip",
+         "poodle",
+         "itch",
+         "punk",
+         "kerfuffle",
+         "soup",
+         "noodle",
+         "avocado",
+         "peanut-butter",
+         "latke",
+         "milkshake",
+         "banana",
+         "lizard",
+         "lemur",
+         "lentil",
+         "bits",
+         "house",
+         "leader",
+         "toaster",
+         "signal",
+         "pancake",
+         "kitty",
+         "cat",
+         "cattywampus",
+         "poo",
+         "malarkey",
+         "general",
+         "rabbit",
+         "chair",
+         "staircase",
+         "underoos",
+         "snack",
+         "lamp",
+         "eagle",
+         "hobbit",
+         "diablo",
+         "earthworm",
+         "pot",
+         "plant",
+         "leg",
+         "arm",
+         "bike",
+         "citrus",
+         "dog",
+         "puppy",
+         "blackbean",
+         "ricecake",
+         "gato",
+         "nalgas",
+         "lemon",
+         "caramel",
+         "fudge",
+         "cherry",
+         "sundae",
+         "truffle",
+         "cinnamonbun",
+         "pastry",
+         "egg",
+         "omelette",
+         "fork",
+         "knife",
+         "spoon",
+         "salad",
+         "train",
+         "car",
+         "motorcycle",
+         "bicycle",
+         "platanos",
+         "mango",
+         "taco",
+         "pedo",
+         "nunchucks",
+         "destiny",
+         "hope",
+         "despacito",
+         "frito",
+         "chip",
+     ]
+
+     def generate(self, delim="-", length=4, chars="0123456789"):
+         """
+         Generate a robot name. Inspiration from Haikunator, but much more
+         poorly implemented ;)
+
+         Parameters
+         ==========
+         delim: Delimiter
+         length: TokenLength
+         chars: TokenChars
+         """
+
+         descriptor = self._select(self._descriptors)
+         noun = self._select(self._nouns)
+         numbers = "".join((self._select(chars) for _ in range(length)))
+         return delim.join([descriptor, noun, numbers])
+
+     def _select(self, select_from):
+         """select an element from a list using random.choice
+
+         Parameters
+         ==========
+         should be a list of things to select from
+         """
+         if not select_from:
+             return ""
+
+         return choice(select_from)
+
+
+ def generate_name():
+     namer = JobNamer()
+     return namer.generate()