malcolm3utils-0.5.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
malcolm3utils/__init__.py ADDED
@@ -0,0 +1,2 @@
+ __version__ = "0.5.5"
+ __version_message__ = "%(prog)s, malcolm3utils version %(version)s"
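
The two strings above feed the `click.version_option` decorators used by the scripts below. As an illustrative sketch only (assuming the wheel and its `click` dependency are installed), the banner they produce can be checked like this:

```python
# Illustrative sketch: how __version__ and __version_message__ surface
# via click.version_option (assumes the package and click are installed).
from click.testing import CliRunner

from malcolm3utils.scripts.getcol import getcol

result = CliRunner().invoke(getcol, ["--version"])
print(result.output.strip())  # e.g. "getcol, malcolm3utils version 0.5.5"
```
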
malcolm3utils/py.typed ADDED
File without changes
malcolm3utils/scripts/getcol.py ADDED
@@ -0,0 +1,100 @@
+ #!/usr/bin/python
+
+ import csv
+ import sys
+ from pathlib import Path
+ from typing import List, Optional, Tuple
+
+ import click
+
+ from .. import __version__, __version_message__
+
+
+ @click.command(
+     help="""
+ Read the specified file and write out just the specified columns to stdout.
+
+ The column_spec is a comma separated list of column headers, column indexes (one-based),
+ or column ranges (e.g. 4-6 for columns 4 through 6 inclusive).
+
+ If no file_to_read is specified, then input is read from stdin.
+ """
+ )
+ @click.option(
+     "-d", "--delimiter", type=str, help="column delimiter (default=TAB)", default="\t"
+ )
+ @click.option(
+     "-o",
+     "--output-delimiter",
+     type=str,
+     help="output column delimiter (default=input delimiter)",
+ )
+ @click.version_option(__version__, message=__version_message__)
+ @click.argument("column_spec", type=str, required=True)
+ @click.argument("file_to_read", type=click.Path(exists=True), required=False)
+ def getcol(
+     column_spec: str,
+     file_to_read: Optional[Path] = None,
+     delimiter: str = "\t",
+     output_delimiter: Optional[str] = None,
+ ) -> None:
+     if output_delimiter is None:
+         output_delimiter = delimiter
+     column_list, includes_headers = _parse_column_spec(column_spec)
+     writer = csv.writer(sys.stdout, delimiter=output_delimiter)
+     try:
+         fh = sys.stdin
+         if file_to_read is not None:
+             fh = open(file_to_read)
+         reader = csv.reader(fh, delimiter=delimiter)
+
+         for irow, row in enumerate(reader):
+             if irow == 0 and includes_headers:
+                 column_list = _process_headers(column_list, row)
+             output_row = [row[int(i)] for i in column_list]
+             writer.writerow(output_row)
+     finally:
+         if fh is not None:
+             fh.close()
+
+
+ def _parse_column_spec(column_spec: str) -> Tuple[List[str | int], bool]:
+     column_list: List[str | int] = []
+     includes_headers = False
+     for spec in column_spec.split(","):
+         if "-" in spec:
+             range_parts = spec.split("-", 1)
+             if (
+                 len(range_parts) == 2
+                 and range_parts[0].isnumeric()
+                 and range_parts[1].isnumeric()
+             ):
+                 column_list.extend(range(int(range_parts[0]) - 1, int(range_parts[1])))
+             else:
+                 column_list.append(spec)
+                 includes_headers = True
+         elif spec.isnumeric():
+             column_list.append(int(spec) - 1)
+         else:
+             column_list.append(spec)
+             includes_headers = True
+     return column_list, includes_headers
+
+
+ def _process_headers(
+     column_list: List[str | int], headers: List[str]
+ ) -> List[str | int]:
+     updated_column_list: List[str | int] = []
+     for col in column_list:
+         if isinstance(col, str):
+             try:
+                 updated_column_list.append(headers.index(col))
+             except ValueError:
+                 pass
+         elif isinstance(col, int):
+             updated_column_list.append(col)
+     return updated_column_list
+
+
+ if __name__ == "__main__":
+     getcol()  # pragma: no cover
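
To make the column-spec handling concrete, here is a minimal sketch (illustrative only; it calls the private helpers `_parse_column_spec` and `_process_headers` defined above and assumes the wheel is installed):

```python
# Sketch of how a spec mixing a header name, a one-based index,
# and an inclusive range is resolved by the helpers above.
from malcolm3utils.scripts.getcol import _parse_column_spec, _process_headers

columns, has_headers = _parse_column_spec("name,2,4-6")
print(columns, has_headers)  # ['name', 1, 3, 4, 5] True

# Because a header name was used, the first row of input is treated as a
# header row and names are resolved to zero-based column indexes.
header_row = ["id", "name", "score", "a", "b", "c"]
print(_process_headers(columns, header_row))  # [1, 1, 3, 4, 5]
```
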
malcolm3utils/scripts/merge.py ADDED
@@ -0,0 +1,201 @@
+ #!/usr/bin/python
+
+ import csv
+ import logging
+ import sys
+ from typing import Dict, Iterable, List, Optional, TextIO
+
+ import click
+ import click_logging
+
+ from .. import __version__, __version_message__
+
+ logger = logging.getLogger(__name__)
+ click_logging.basic_config(logger)
+
+
+ @click.command(
+     help="""
+ Merge the specified delimited files with column headings, joining entries with
+ the same key field value.
+
+ The files do not need to be sorted on the key field as with join(1). This does
+ require that all of the data be read into memory. If that is a problem, using
+ the system join(1) command is recommended.
+
+ Rows will be printed in the order that the unique key values are encountered
+ when reading through the input files.
+
+ To read from stdin, use '-' as the filename.
+
+ The output key column will be the first column of the output file and the
+ header will be the header from the first file.
+
+ If -k is used to specify alternative key columns for subsequent files, and
+ those files have a column with the same name as the output key column, that
+ column will be ignored.
+ """
+ )
+ @click_logging.simple_verbosity_option(logger)
+ @click.option(
+     "-d", "--delimiter", type=str, help="column delimiter (default=TAB)", default="\t"
+ )
+ @click.option(
+     "-o",
+     "--output-delimiter",
+     type=str,
+     help="output column delimiter (default=input delimiter)",
+ )
+ @click.option(
+     "--all-delimiter",
+     type=str,
+     help='when keep=="all" this will be the delimiter between entries where there are multiple '
+     '(default=";")',
+     default=";",
+ )
+ @click.option(
+     "-k",
+     "--key-column",
+     type=str,
+     help="comma separated list of key column identifiers. "
+     "each new file will use the next identifier. "
+     "the last identifier will be used for all remaining files, "
+     'so just use "-k identifier" if the identifier is the same for all files. '
+     "The identifier can either be the header string or the one-based column index. "
+     "(default=1 (i.e. the first column of each file))",
+     default="1",
+ )
+ @click.option(
+     "--keep",
+     type=click.Choice(["first", "last", "uniq", "all"], case_sensitive=False),
+     default="all",
+     help="specifies how to handle multiple values for the same field with the same key",
+ )
+ @click.option(
+     "-I",
+     "--ignore",
+     type=str,
+     help="comma separated list of column identifiers to ignore",
+ )
+ @click.version_option(__version__, message=__version_message__)
+ @click.argument("files_to_read", nargs=-1, type=click.File("r"), required=False)
+ def merge(
+     files_to_read: Iterable[TextIO] = (),
+     key_column: str = "1",
+     delimiter: str = "\t",
+     output_delimiter: Optional[str] = None,
+     keep: str = "all",
+     all_delimiter: str = ";",
+     ignore: str | None = None,
+ ) -> None:
+     if output_delimiter is None:
+         output_delimiter = delimiter
+     key_column_list = key_column.split(",")
+     ignore_set = set()
+     if ignore is not None:
+         ignore_set.update(ignore.split(","))
+
+     data: Dict[str, Dict[str, str]] = {}
+     output_key = None
+     data_field_list = []
+     for ifile, fh in enumerate(files_to_read):
+         logger.debug('processing file "%s"', fh.name)
+         if ifile >= len(key_column_list):
+             ifile = -1
+         key = key_column_list[ifile]
+         reader = csv.DictReader(fh, delimiter=delimiter)
+         if reader.fieldnames is None:
+             logger.warning('No fieldnames found in file "%s", skipping file.', fh.name)
+             continue
+         this_data_field_list = [x for x in reader.fieldnames if x not in ignore_set]
+         if key.isnumeric():
+             key = this_data_field_list[int(key) - 1]
+         elif key not in this_data_field_list:
+             logger.warning(
+                 'Key "%s" not found in file "%s", skipping file.', key, fh.name
+             )
+             continue
+         logger.debug('...using key "%s"', key)
+         this_data_field_list.remove(key)
+         if output_key is None:
+             output_key = key
+             data_field_list.append(output_key)
+         if output_key in this_data_field_list:
+             this_data_field_list.remove(output_key)
+         _process_rows(
+             reader,
+             fh.name,
+             key,
+             output_key,
+             keep,
+             all_delimiter,
+             this_data_field_list,
+             data,
+         )
+         data_field_list.extend(
+             [x for x in this_data_field_list if x not in data_field_list]
+         )
+
+     logger.debug("writing output")
+     writer = csv.DictWriter(
+         sys.stdout, fieldnames=data_field_list, delimiter=output_delimiter
+     )
+     writer.writeheader()
+     writer.writerows(data.values())
+
+
+ def _process_rows(
+     reader: csv.DictReader,
+     fname: str,
+     key: str,
+     output_key: str,
+     keep: str,
+     all_delimiter: str,
+     data_field_list: List[str],
+     data: Dict[str, Dict[str, str]],
+ ) -> None:
+     irow = 0
+     for irow, row in enumerate(reader):
+         key_value = row.get(key, None)
+         if key_value is None or len(key_value) == 0:
+             logger.warning(
+                 'No key value found for line %d in file "%s", skipping line.',
+                 irow + 2,
+                 fname,
+             )
+             continue
+         if key_value not in data:
+             data[key_value] = {output_key: key_value}
+         entry = data[key_value]
+         _process_row(row, data_field_list, keep, all_delimiter, entry)
+
+     logger.debug("...processed %d entries", irow + 1)
+     logger.debug("...total unique entries is now %d", len(data))
+
+
+ def _process_row(
+     row: Dict[str, str],
+     data_field_list: List[str],
+     keep: str,
+     all_delimiter: str,
+     entry: Dict[str, str],
+ ) -> None:
+     for data_field in data_field_list:
+         data_value = row[data_field]
+         if data_value is None or len(data_value) == 0:
+             pass
+         elif data_field not in entry or keep == "last":
+             entry[data_field] = data_value
+         elif keep == "all":
+             entry[data_field] += all_delimiter
+             entry[data_field] += data_value
+         elif keep == "uniq":
+             if data_value not in entry[data_field].split(all_delimiter):
+                 entry[data_field] += all_delimiter
+                 entry[data_field] += data_value
+         else:  # keep == 'first' so ignore subsequent values
+             pass
+
+
+ if __name__ == "__main__":
+     merge()  # pragma: no cover
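
The `--keep` policies are easiest to see through `_process_row`, the private helper above that folds one row's values into the accumulated entry for a key. A minimal sketch (illustrative only; assumes the wheel is installed):

```python
# Sketch of the --keep semantics implemented by _process_row above.
from malcolm3utils.scripts.merge import _process_row

fields = ["city"]

# keep="all": every non-empty value is appended, joined by the all-delimiter.
entry = {"id": "42"}
for city in ("Boston", "Austin", "Boston"):
    _process_row({"city": city}, fields, "all", ";", entry)
print(entry)  # {'id': '42', 'city': 'Boston;Austin;Boston'}

# keep="uniq": a value is only appended if it is not already present.
entry = {"id": "42"}
for city in ("Boston", "Austin", "Boston"):
    _process_row({"city": city}, fields, "uniq", ";", entry)
print(entry)  # {'id': '42', 'city': 'Boston;Austin'}
```
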
malcolm3utils/scripts/touch_latest.py ADDED
@@ -0,0 +1,153 @@
+ #!/usr/bin/python
+
+ import os
+ from collections.abc import Iterable
+ from fnmatch import fnmatch
+ from pathlib import Path
+
+ import click
+
+ from .. import __version__, __version_message__
+
+ # be sure to update docstring if you change DEFAULT_IGNORE_GLOBS
+ DEFAULT_IGNORE_GLOBS = ["*~", "*.pyc", "#*", ".*", "*.OLD", "OLD"]
+
+
+ @click.command()
+ @click.option(
+     "-i",
+     "--ignore",
+     "ignore_patterns",
+     multiple=True,
+     type=str,
+     help="glob patterns to ignore (likely needs to be quoted, and can be repeated)",
+ )
+ @click.option(
+     "-f",
+     "--ignore-file",
+     "ignore_pattern_files",
+     multiple=True,
+     type=click.Path(exists=True),
+     help="file with glob patterns (one per line) to ignore (can be repeated)",
+ )
+ @click.option(
+     "-n", "--no-default-ignore", is_flag=True, help="do not use default ignore globs"
+ )
+ @click.version_option(__version__, message=__version_message__)
+ @click.argument("touch_file", type=str)
+ @click.argument("paths_to_check", nargs=-1, type=click.Path(exists=True), required=True)
+ def touch_latest(
+     touch_file: str,
+     paths_to_check: list[str | Path],
+     ignore_patterns: Iterable[str] = (),
+     ignore_pattern_files: Iterable[str | Path] = (),
+     no_default_ignore: bool = False,
+ ) -> None:
+     """
+     Find the latest change date of any file under the specified PATHS_TO_CHECK
+     and touch the TOUCH_FILE with that date (creating it if necessary).
+
+     Files that match ignore patterns will be ignored when searching
+     for the file with the latest change date.
+     Patterns that contain slashes either need to be absolute (i.e. start
+     with a slash) or they need to start with an asterisk in order
+     to match anything. So any such pattern that doesn't have either
+     will have an asterisk prepended.
+
+     Directories which match an ignore pattern will not be traversed.
+     Paths can be specified to ignore only from specific directories,
+     e.g. '*/test/*.out'.
+
+     Default ignore globs: '*~', '*.pyc', '#*', '.*', '*.OLD', 'OLD'
+
+     \b
+     touch_file: file to be touched with the latest date
+     paths_to_check: paths to search for the latest change date
+     \f
+
+     :param touch_file: file to be touched with the latest date
+     :param paths_to_check: paths to search for the latest change date
+     :param ignore_patterns: glob patterns to ignore
+     :param ignore_pattern_files: files of glob patterns to ignore
+     :param no_default_ignore: if True do not include default glob patterns
+
+     """
+     all_ignore_patterns = IgnorePatterns()
+
+     if not no_default_ignore:
+         all_ignore_patterns.add_patterns(DEFAULT_IGNORE_GLOBS)
+     for fn in ignore_pattern_files:
+         with open(fn) as fh:
+             all_ignore_patterns.add_patterns(fh)
+     all_ignore_patterns.add_patterns(ignore_patterns)
+
+     latest_timestamp = 0
+
+     for path in paths_to_check:
+         apath = os.path.abspath(path)
+         for root, dirs, files in os.walk(apath):
+             dirs[:] = [dn for dn in dirs if not all_ignore_patterns.ignore(root, dn)]
+             for fn in files:
+                 if not all_ignore_patterns.ignore(root, fn):
+                     statinfo = os.stat(os.path.join(root, fn))
+                     if statinfo.st_mtime > latest_timestamp:
+                         latest_timestamp = int(statinfo.st_mtime)
+     if not os.path.exists(touch_file):
+         with open(touch_file, "w"):
+             pass
+     os.utime(touch_file, (latest_timestamp, latest_timestamp))
+
+
+ class IgnorePatterns:
+     """
+     Class to handle checking glob patterns to be ignored
+     """
+
+     def __init__(self, patterns: Iterable[str] = ()) -> None:
+         self.names: list[str] = []
+         self.paths: list[str] = []
+         self.add_patterns(patterns)
+
+     def add_patterns(self, patterns: Iterable[str]) -> None:
+         """
+         Add these patterns to the list of glob patterns to be ignored.
+
+         Whitespace is stripped from the ends of each pattern since they may have been read from a file.
+
+         Patterns that contain a '/' must either start with a '/' (i.e. be absolute), or start
+         with a '*', or there is no chance of a match.
+         Accordingly, patterns that contain a '/' and start with something else have '*' prepended to them.
+
+         :param patterns: list of glob patterns to be ignored
+         :return: None
+         """
+         for pattern in patterns:
+             pattern = pattern.strip()
+             if "/" in pattern:
+                 if pattern[0] not in "/*":
+                     pattern = "*" + pattern
+                 self.paths.append(pattern)
+             else:
+                 self.names.append(pattern)
+
+     def ignore(self, dn: str, fn: str) -> bool:
+         """
+         Check whether the file named fn in the directory named dn
+         should be ignored or not.
+
+         :param dn: directory name
+         :param fn: file name
+         :return: True if the path matches an ignore pattern, False otherwise
+         """
+         for ignore_name in self.names:
+             if fnmatch(fn, ignore_name):
+                 return True
+         path = os.path.join(dn, fn)
+         for ignore_path in self.paths:
+             if fnmatch(path, ignore_path):
+                 return True
+         return False
+
+
+ if __name__ == "__main__":
+     touch_latest()  # pragma: no cover
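
The ignore handling is concentrated in `IgnorePatterns`: patterns without a slash match bare file or directory names, while patterns containing a slash match the full path (and gain a leading `*` if they start with neither `/` nor `*`). A minimal sketch (illustrative only; assumes the wheel is installed):

```python
# Sketch of the IgnorePatterns matching rules defined above.
from malcolm3utils.scripts.touch_latest import IgnorePatterns

ip = IgnorePatterns(["*.pyc", "build/*"])  # "build/*" is stored as "*build/*"

print(ip.ignore("/src/pkg", "module.pyc"))     # True  (name pattern "*.pyc")
print(ip.ignore("/src/pkg/build", "out.txt"))  # True  (path pattern "*build/*")
print(ip.ignore("/src/pkg", "module.py"))      # False
```
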
malcolm3utils-0.5.5.dist-info/LICENCE ADDED
@@ -0,0 +1,20 @@
+ The MIT License (MIT)
+
+ Copyright (c) 2021 Malcolm E. Davis <mnjjunk@comcast.net>
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
+ this software and associated documentation files (the "Software"), to deal in
+ the Software without restriction, including without limitation the rights to
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+ the Software, and to permit persons to whom the Software is furnished to do so,
+ subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+ FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+ COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
malcolm3utils-0.5.5.dist-info/METADATA ADDED
@@ -0,0 +1,131 @@
+ Metadata-Version: 2.3
+ Name: malcolm3utils
+ Version: 0.5.5
+ Summary: Collection of Utility Scripts and Packages
+ License: BSD-3-Clause
+ Author: Malcolm E. Davis
+ Author-email: mnjjunk@comcast.net
+ Requires-Python: >=3.9,<4.0
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: BSD License
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Classifier: Topic :: Desktop Environment
+ Classifier: Typing :: Typed
+ Requires-Dist: click-logging (>=1.0.1,<2.0.0)
+ Project-URL: Documentation, https://malcolm-3.github.io/malcolm3utils
+ Project-URL: Homepage, https://malcolm-3.github.io/malcolm3utils
+ Project-URL: Repository, https://github.com/malcolm-3/malcolm3utils
+ Description-Content-Type: text/markdown
+
+ # Malcolm3Utils
+
+ [![PyPI](https://img.shields.io/pypi/v/malcolm3utils?style=flat-square)](https://pypi.python.org/pypi/malcolm3utils/)
+ [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/malcolm3utils?style=flat-square)](https://pypi.python.org/pypi/malcolm3utils/)
+ [![PyPI - License](https://img.shields.io/pypi/l/malcolm3utils?style=flat-square)](https://pypi.python.org/pypi/malcolm3utils/)
+ [![Cookiecutter - Wolt](https://img.shields.io/badge/cookiecutter-Wolt-00c2e8?style=flat-square&logo=cookiecutter&logoColor=D4AA00&link=https://github.com/woltapp/wolt-python-package-cookiecutter)](https://github.com/woltapp/wolt-python-package-cookiecutter)
+
+
+ ---
+
+ **Documentation**: [https://malcolm-3.github.io/malcolm3utils](https://malcolm-3.github.io/malcolm3utils)
+
+ **Source Code**: [https://github.com/malcolm-3/malcolm3utils](https://github.com/malcolm-3/malcolm3utils)
+
+ **PyPI**: [https://pypi.org/project/malcolm3utils/](https://pypi.org/project/malcolm3utils/)
+
+ ---
+
+ Collection of Utility Scripts and Packages
+
+ ## Installation
+
+ ```sh
+ pip install malcolm3utils
+ ```
+
+ ## Usage
+
+ This package provides the following command line tools:
+
+ - ``touch_latest``
+   - Touches a marker file with the timestamp of the most recently changed file under the specified directories
+ - ``getcol``
+   - A tool for extracting columns of data by column header name or column id
+ - ``merge``
+   - A version of the ``join`` command that doesn't require pre-sorting
+
+ ## Development
+
+ * Clone this repository
+ * Requirements:
+   * [Poetry](https://python-poetry.org/)
+   * Python 3.9+
+ * Create a virtual environment and install the dependencies
+
+ ```sh
+ poetry install
+ ```
+
+ * Activate the virtual environment
+
+ ```sh
+ poetry shell
+ ```
+
+ ### Testing
+
+ ```sh
+ pytest
+ ```
+
+ ### Documentation
+
+ The documentation is automatically generated from the content of the `docs` directory and from the docstrings
+ of the public signatures of the source code. The documentation is updated and published as a [GitHub project page
+ ](https://pages.github.com/) automatically as part of each release.
+
+ ### Releasing
+
+ Trigger the [Draft release workflow](https://github.com/malcolm-3/malcolm3utils/actions/workflows/draft_release.yml)
+ (press _Run workflow_). This will update the changelog & version and create a GitHub release which is in _Draft_ state.
+
+ Find the draft release from the
+ [GitHub releases](https://github.com/malcolm-3/malcolm3utils/releases) and publish it. When
+ a release is published, it'll trigger the [release](https://github.com/malcolm-3/malcolm3utils/blob/master/.github/workflows/release.yml) workflow, which creates the PyPI
+ release and deploys the updated documentation.
+
+ ### Pre-commit
+
+ Pre-commit hooks run all the auto-formatters (e.g. `black`, `isort`), linters (e.g. `mypy`, `flake8`), and other quality
+ checks to make sure the changeset is in good shape before a commit/push happens.
+
+ You can install the hooks with (runs for each commit):
+
+ ```sh
+ pre-commit install
+ ```
+
+ Or if you want them to run only for each push:
+
+ ```sh
+ pre-commit install -t pre-push
+ ```
+
+ Or if you want to run all checks manually for all files:
+
+ ```sh
+ pre-commit run --all-files
+ ```
+
+ ---
+
+ This project was generated using the [python-package-cookiecutter](https://github.com/collijk/python-package-cookiecutter) template.
+
malcolm3utils-0.5.5.dist-info/RECORD ADDED
@@ -0,0 +1,10 @@
+ malcolm3utils/__init__.py,sha256=nabtuXFhto3Ig2RSu2WL4sr9-DD72QlX0S2cPaJxjCQ,90
+ malcolm3utils/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ malcolm3utils/scripts/getcol.py,sha256=Q_fszFHI1NmbN8v-Va13UjctnmL63m8HFLxPHNQCK9w,3087
+ malcolm3utils/scripts/merge.py,sha256=oUv26eHqByEsLWj05j85Ayn51G8BlqAAOv4M1if2S_c,6358
+ malcolm3utils/scripts/touch_latest.py,sha256=Wb9YJKcrsp_mGuJkYYRb0tjKsRZwiay2BGhaZLXZZCc,5273
+ malcolm3utils-0.5.5.dist-info/LICENCE,sha256=LGjd6BjR_IDgNeEnz7XOVD2CQNpiGcwxZidsFvP0KiM,1105
+ malcolm3utils-0.5.5.dist-info/METADATA,sha256=yTzsH1ks7mUeGXZlYcAa3zFGSbeMTuEKE7COyGt2K0c,4395
+ malcolm3utils-0.5.5.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+ malcolm3utils-0.5.5.dist-info/entry_points.txt,sha256=FSdDidQCKW8lbuRjSZOO5ef76lcCNk5DhJWMtxuXXq4,163
+ malcolm3utils-0.5.5.dist-info/RECORD,,
malcolm3utils-0.5.5.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: poetry-core 2.1.3
+ Root-Is-Purelib: true
+ Tag: py3-none-any
malcolm3utils-0.5.5.dist-info/entry_points.txt ADDED
@@ -0,0 +1,5 @@
+ [console_scripts]
+ getcol=malcolm3utils.scripts.getcol:getcol
+ merge=malcolm3utils.scripts.merge:merge
+ touch_latest=malcolm3utils.scripts.touch_latest:touch_latest
+