flatdir 0.0.6__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flatdir-0.0.6/LICENSE +21 -0
- flatdir-0.0.6/PKG-INFO +48 -0
- flatdir-0.0.6/README.md +106 -0
- flatdir-0.0.6/docs/index.md +27 -0
- flatdir-0.0.6/pyproject.toml +47 -0
- flatdir-0.0.6/setup.cfg +4 -0
- flatdir-0.0.6/src/flatdir/__init__.py +1 -0
- flatdir-0.0.6/src/flatdir/__main__.py +89 -0
- flatdir-0.0.6/src/flatdir/listing.py +66 -0
- flatdir-0.0.6/src/flatdir/plugins/__init__.py +0 -0
- flatdir-0.0.6/src/flatdir/plugins/defaults.py +38 -0
- flatdir-0.0.6/src/flatdir/plugins/filename_length.py +8 -0
- flatdir-0.0.6/src/flatdir/plugins_loader.py +52 -0
- flatdir-0.0.6/src/flatdir.egg-info/PKG-INFO +48 -0
- flatdir-0.0.6/src/flatdir.egg-info/SOURCES.txt +21 -0
- flatdir-0.0.6/src/flatdir.egg-info/dependency_links.txt +1 -0
- flatdir-0.0.6/src/flatdir.egg-info/top_level.txt +1 -0
- flatdir-0.0.6/tests/test_depth.py +42 -0
- flatdir-0.0.6/tests/test_fields.py +147 -0
- flatdir-0.0.6/tests/test_json_output.py +16 -0
- flatdir-0.0.6/tests/test_limit.py +26 -0
- flatdir-0.0.6/tests/test_output.py +85 -0
- flatdir-0.0.6/tests/test_version.py +7 -0
flatdir-0.0.6/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2019 Romain Vuillemot
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
flatdir-0.0.6/PKG-INFO
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: flatdir
|
|
3
|
+
Version: 0.0.6
|
|
4
|
+
Summary: A Python library to create a flat JSON index of files and directories.
|
|
5
|
+
Author: Your Name
|
|
6
|
+
License-Expression: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/romsson/flatdir
|
|
8
|
+
Project-URL: Repository, https://github.com/romsson/flatdir
|
|
9
|
+
Classifier: Natural Language :: English
|
|
10
|
+
Classifier: Programming Language :: Python :: 3
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
16
|
+
Classifier: Typing :: Typed
|
|
17
|
+
Requires-Python: <4,>=3.10
|
|
18
|
+
Description-Content-Type: text/markdown
|
|
19
|
+
License-File: LICENSE
|
|
20
|
+
Dynamic: license-file
|
|
21
|
+
|
|
22
|
+
# flatdir
|
|
29
|
+
|
|
30
|
+
`flatdir` scans a directory tree and generates a flat JSON file with metadata for each entry.
|
|
31
|
+
|
|
32
|
+
## Installation
|
|
33
|
+
|
|
34
|
+
```bash
|
|
35
|
+
pip install -e .
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
Or from PyPI:
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
pip install flatdir
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
## Usage
|
|
45
|
+
|
|
46
|
+
```bash
|
|
47
|
+
python -m flatdir .
|
|
48
|
+
```
|
flatdir-0.0.6/README.md
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
# flatdir
|
|
2
|
+
|
|
3
|
+
A Python library to create a flat JSON index of files and directories
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pip install -e .
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
Or from PyPI:
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
pip install flatdir
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
## Usage
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
python -m flatdir .
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
Returns a JSON file with metadata for each entry in the current directory and its subdirectories.
|
|
24
|
+
|
|
25
|
+
```json
|
|
27
|
+
[
|
|
28
|
+
{
|
|
29
|
+
"name": ".DS_Store",
|
|
30
|
+
"type": "file",
|
|
31
|
+
"mtime": "Mon, 23 Feb 2026 13:12:54 GMT",
|
|
32
|
+
"size": 6148
|
|
33
|
+
},
|
|
34
|
+
...
|
|
35
|
+
]
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
`--limit N` to limit the number of entries processed:
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
python -m flatdir . --limit 10
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
`--depth N` to limit the depth of the directory tree:
|
|
45
|
+
|
|
46
|
+
```bash
|
|
47
|
+
python -m flatdir . --depth 2
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
`--output FILE` to write the result to a file:
|
|
51
|
+
|
|
52
|
+
```bash
|
|
53
|
+
python -m flatdir . --output flat.json
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
`--fields FILE` to add custom fields via a plugin file:
|
|
57
|
+
|
|
58
|
+
```bash
|
|
59
|
+
python -m flatdir . --fields my_fields.py
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
The plugin file is a Python file where each public function becomes a JSON field.
|
|
63
|
+
Each function receives the entry `Path` and the `root` directory path.
|
|
64
|
+
Return `None` to omit the field from the output:
|
|
65
|
+
|
|
66
|
+
```python
|
|
67
|
+
# my_fields.py
|
|
68
|
+
from pathlib import Path
|
|
69
|
+
|
|
70
|
+
def ext(path: Path, root: Path) -> str:
|
|
71
|
+
return path.suffix
|
|
72
|
+
|
|
73
|
+
def line_count(path: Path, root: Path) -> int | None:
|
|
74
|
+
if path.is_dir():
|
|
75
|
+
return None
|
|
76
|
+
return len(path.read_text().splitlines())
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
Output (both files and directories are listed):
|
|
80
|
+
|
|
81
|
+
```json
|
|
82
|
+
[
|
|
83
|
+
{
|
|
84
|
+
"name": "docs",
|
|
85
|
+
"type": "directory",
|
|
86
|
+
"mtime": "Mon, 23 Feb 2026 13:12:54 GMT"
|
|
87
|
+
},
|
|
88
|
+
{
|
|
89
|
+
"name": "README.md",
|
|
90
|
+
"type": "file",
|
|
91
|
+
"mtime": "Mon, 23 Feb 2026 13:12:54 GMT",
|
|
92
|
+
"size": 835,
|
|
93
|
+
"ext": ".md",
|
|
94
|
+
"line_count": 54
|
|
95
|
+
}
|
|
96
|
+
]
|
|
97
|
+
```
|
|
98
|
+
|
|
99
|
+
The default fields (`name`, `type`, `mtime`, `size`) are themselves plugins defined
|
|
100
|
+
in `src/flatdir/plugins/defaults.py`. Additional examples are in `src/flatdir/plugins/`.
|
|
101
|
+
|
|
102
|
+
All options can be combined:
|
|
103
|
+
|
|
104
|
+
```bash
|
|
105
|
+
python -m flatdir . --depth 0 --limit 10 --fields my_fields.py --output result.json
|
|
106
|
+
```
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
# flatdir
|
|
8
|
+
|
|
9
|
+
`flatdir` scans a directory tree and generates a flat JSON file with metadata for each entry.
|
|
10
|
+
|
|
11
|
+
## Installation
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
pip install -e .
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
Or from PyPI:
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
pip install flatdir
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
## Usage
|
|
24
|
+
|
|
25
|
+
```bash
|
|
26
|
+
python -m flatdir .
|
|
27
|
+
```
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "flatdir"
|
|
3
|
+
description = "A Python library to create a flat JSON index of files and directories."
|
|
4
|
+
readme = "docs/index.md"
|
|
5
|
+
requires-python = ">=3.10,<4"
|
|
6
|
+
license = "MIT"
|
|
7
|
+
authors = [{ name = "Your Name" }]
|
|
8
|
+
classifiers = [
|
|
9
|
+
"Natural Language :: English",
|
|
10
|
+
"Programming Language :: Python :: 3",
|
|
11
|
+
"Programming Language :: Python :: 3.10",
|
|
12
|
+
"Programming Language :: Python :: 3.11",
|
|
13
|
+
"Programming Language :: Python :: 3.12",
|
|
14
|
+
"Programming Language :: Python :: 3.13",
|
|
15
|
+
"Programming Language :: Python :: 3.14",
|
|
16
|
+
"Typing :: Typed",
|
|
17
|
+
]
|
|
18
|
+
dependencies = []
|
|
19
|
+
dynamic = ["version"]
|
|
20
|
+
|
|
21
|
+
[tool.setuptools.dynamic]
|
|
22
|
+
version = {attr = "flatdir.__version__"}
|
|
23
|
+
|
|
24
|
+
[project.urls]
|
|
25
|
+
Homepage = "https://github.com/romsson/flatdir"
|
|
26
|
+
Repository = "https://github.com/romsson/flatdir"
|
|
27
|
+
|
|
28
|
+
[dependency-groups]
|
|
29
|
+
tests = [
|
|
30
|
+
"coverage[toml]>=7,<8",
|
|
31
|
+
"httpx>=0.23,<1",
|
|
32
|
+
"pytest-mock>=3,<4",
|
|
33
|
+
"pytest>=8.1.1,<9",
|
|
34
|
+
]
|
|
35
|
+
|
|
36
|
+
[tool.coverage.run]
|
|
37
|
+
command_line = "-m pytest"
|
|
38
|
+
source_pkgs = ["flatdir", "tests"]
|
|
39
|
+
|
|
40
|
+
[tool.coverage.paths]
|
|
41
|
+
flatdir = ["src/flatdir"]
|
|
42
|
+
tests = ["tests"]
|
|
43
|
+
|
|
44
|
+
[tool.pytest.ini_options]
|
|
45
|
+
addopts = "-q"
|
|
46
|
+
minversion = "6.0"
|
|
47
|
+
testpaths = ["tests"]
|
flatdir-0.0.6/setup.cfg
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Single source of truth for the package version; setuptools reads it via
# [tool.setuptools.dynamic] version = {attr = "flatdir.__version__"}.
__version__ = "0.0.6"
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
"""Command-line entrypoint for flatdir: print directory listing as JSON.
|
|
2
|
+
|
|
3
|
+
Usage: python -m flatdir [--limit N] [--depth N] [--output FILE] [--fields FILE] [path]
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import sys
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
from .listing import list_entries
|
|
13
|
+
from .plugins_loader import load_fields_file
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def main(argv: list[str] | None = None) -> int:
|
|
17
|
+
argv = argv if argv is not None else sys.argv[1:]
|
|
18
|
+
|
|
19
|
+
# parse --limit flag if present
|
|
20
|
+
limit: int | None = None
|
|
21
|
+
if "--limit" in argv:
|
|
22
|
+
try:
|
|
23
|
+
idx = argv.index("--limit")
|
|
24
|
+
limit = int(argv[idx + 1])
|
|
25
|
+
argv = argv[:idx] + argv[idx + 2 :]
|
|
26
|
+
except (IndexError, ValueError):
|
|
27
|
+
print("error: --limit requires a valid integer argument", file=sys.stderr)
|
|
28
|
+
return 1
|
|
29
|
+
|
|
30
|
+
# parse --depth flag if present
|
|
31
|
+
depth: int | None = None
|
|
32
|
+
if "--depth" in argv:
|
|
33
|
+
try:
|
|
34
|
+
idx = argv.index("--depth")
|
|
35
|
+
depth = int(argv[idx + 1])
|
|
36
|
+
argv = argv[:idx] + argv[idx + 2 :]
|
|
37
|
+
except (IndexError, ValueError):
|
|
38
|
+
print("error: --depth requires a valid integer argument", file=sys.stderr)
|
|
39
|
+
return 1
|
|
40
|
+
|
|
41
|
+
# parse --output flag if present
|
|
42
|
+
output: str | None = None
|
|
43
|
+
if "--output" in argv:
|
|
44
|
+
try:
|
|
45
|
+
idx = argv.index("--output")
|
|
46
|
+
output = argv[idx + 1]
|
|
47
|
+
argv = argv[:idx] + argv[idx + 2 :]
|
|
48
|
+
except (IndexError, ValueError):
|
|
49
|
+
print("error: --output requires a file path argument", file=sys.stderr)
|
|
50
|
+
return 1
|
|
51
|
+
|
|
52
|
+
# parse --fields flag if present
|
|
53
|
+
fields = None
|
|
54
|
+
if "--fields" in argv:
|
|
55
|
+
try:
|
|
56
|
+
idx = argv.index("--fields")
|
|
57
|
+
fields_path = argv[idx + 1]
|
|
58
|
+
fields = load_fields_file(fields_path)
|
|
59
|
+
argv = argv[:idx] + argv[idx + 2 :]
|
|
60
|
+
except IndexError:
|
|
61
|
+
print("error: --fields requires a file path argument", file=sys.stderr)
|
|
62
|
+
return 1
|
|
63
|
+
except (FileNotFoundError, ImportError) as exc:
|
|
64
|
+
print(f"error: {exc}", file=sys.stderr)
|
|
65
|
+
return 1
|
|
66
|
+
|
|
67
|
+
path = Path(argv[0]) if argv else Path(".")
|
|
68
|
+
|
|
69
|
+
# error in case of missing path or path is not a directory
|
|
70
|
+
if not path.exists() or not path.is_dir():
|
|
71
|
+
print(f"path is not a directory: {path}", file=sys.stderr)
|
|
72
|
+
return 2
|
|
73
|
+
|
|
74
|
+
# generate the actual list of entries to be returned as JSON
|
|
75
|
+
entries = list_entries(path, limit=limit, depth=depth, fields=fields)
|
|
76
|
+
|
|
77
|
+
# write JSON to output file or stdout
|
|
78
|
+
if output is not None:
|
|
79
|
+
with open(output, "w", encoding="utf-8") as f:
|
|
80
|
+
json.dump(entries, f, ensure_ascii=False, indent=4)
|
|
81
|
+
f.write("\n")
|
|
82
|
+
else:
|
|
83
|
+
json.dump(entries, sys.stdout, ensure_ascii=False, indent=4)
|
|
84
|
+
_ = sys.stdout.write("\n")
|
|
85
|
+
return 0
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
# Support `python -m flatdir ...`; the process exit code is main()'s return.
if __name__ == "__main__":
    raise SystemExit(main())
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
"""List entries in a directory and return metadata as a list of dict.
|
|
2
|
+
|
|
3
|
+
Each entry is a dict with keys determined by field plugins (defaults: name, type, mtime, size).
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import os
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
|
|
11
|
+
from .plugins import defaults as _defaults
|
|
12
|
+
from .plugins_loader import load_fields_file
|
|
13
|
+
|
|
14
|
+
# Built-in default fields (name, type, mtime, size), loaded once at import
# time from plugins/defaults.py through the same loader used for --fields.
DEFAULT_FIELDS = load_fields_file(_defaults.__file__)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def list_entries(
    root: Path,
    limit: int | None = None,
    depth: int | None = None,
    fields: dict[str, object] | None = None,
) -> list[dict[str, object]]:
    """Walk *root* and return one metadata dict per file and directory.

    Parameters:
        root: directory to scan (resolved to an absolute path first).
        limit: maximum number of entries returned; ``None`` or a negative
            value means no limit.
        depth: maximum directory depth listed; ``None`` or a negative value
            means unlimited, ``0`` lists only the direct children of *root*.
        fields: optional ``{name: callable(path, root)}`` mapping merged over
            the built-in ``DEFAULT_FIELDS`` (custom names override defaults);
            a field returning ``None`` is omitted from the entry.

    Returns the entries sorted by their ``"name"`` field.
    """
    root = root.resolve()

    # merge default fields with custom fields (custom can override defaults)
    all_fields = dict(DEFAULT_FIELDS)
    if fields:
        all_fields.update(fields)

    def _describe(p: Path) -> dict[str, object]:
        """Apply every field plugin to *p*, dropping None-valued fields."""
        entry: dict[str, object] = {}
        for field_name, func in all_fields.items():
            value = func(p, root)
            if value is not None:
                entry[field_name] = value
        return entry

    limit_depth = depth is not None and depth >= 0
    entries: list[dict[str, object]] = []
    for dirpath, dirnames, filenames in os.walk(root):
        base = Path(dirpath)
        current_depth = len(base.relative_to(root).parts)
        if limit_depth and current_depth > depth:
            continue

        # subdirectories and files at this level share the same treatment
        for child in dirnames + filenames:
            entries.append(_describe((base / child).resolve()))

        # prune: once at the cut-off depth, deeper levels could only produce
        # skipped entries, so stop os.walk from descending (output unchanged)
        if limit_depth and current_depth >= depth:
            dirnames[:] = []

    # return a sorted list of entries
    entries.sort(key=lambda e: str(e.get("name", "")))

    # apply limit if provided
    if limit is not None and limit >= 0:
        entries = entries[:limit]

    return entries
|
|
File without changes
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"""Default field plugins for flatdir.
|
|
2
|
+
|
|
3
|
+
These functions produce the built-in fields: name, type, mtime, size.
|
|
4
|
+
Each function receives the file path and the root directory path.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import os
|
|
10
|
+
import time
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def name(path: Path, root: Path) -> str:
    """Path of the entry relative to the listing root, as a string."""
    try:
        rel = path.relative_to(root)
    except ValueError:
        # path lies outside root (e.g. a resolved symlink target): fall back
        # to an os-level relative path, which may contain "..".
        return str(os.path.relpath(str(path), str(root)))
    return str(rel)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def type(path: Path, root: Path) -> str:
    """Entry type: 'directory' for directories, 'file' for everything else."""
    if path.is_dir():
        return "directory"
    return "file"
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def mtime(path: Path, root: Path) -> str:
    """Last modification time formatted as an HTTP-date string (UTC)."""
    stamp = path.stat().st_mtime
    return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stamp))
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def size(path: Path, root: Path) -> int | None:
|
|
35
|
+
"""File size in bytes. Returns None for directories (omitted from output)."""
|
|
36
|
+
if path.is_dir():
|
|
37
|
+
return None
|
|
38
|
+
return int(path.stat().st_size)
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
"""Load custom field functions from a user-provided Python file.
|
|
2
|
+
|
|
3
|
+
A fields file is a plain Python module where each **public** function
|
|
4
|
+
(i.e. whose name does not start with ``_``) is treated as a field provider.
|
|
5
|
+
|
|
6
|
+
Each function must accept two :class:`pathlib.Path` arguments (the file
being listed and the listing root) and return a JSON-serialisable value. The function name becomes
|
|
8
|
+
the field key in the output.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
import importlib.util
|
|
14
|
+
import inspect
|
|
15
|
+
import sys
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
from typing import Callable
|
|
18
|
+
|
|
19
|
+
# A field provider receives (entry path, listing root) and returns a
# JSON-serialisable value, or None to omit the field from the entry.
# NOTE: the previous alias Callable[[Path], object] was wrong — every call
# site (listing.list_entries, the default plugins) passes two Path arguments.
FieldFunc = Callable[[Path, Path], object]


def load_fields_file(filepath: str) -> dict[str, FieldFunc]:
    """Import *filepath* as a module and return its public field functions.

    Every public callable (name not starting with ``_``, not a class) that
    is defined in the file itself becomes a field provider; the function
    name is used as the field key in the JSON output.

    Raises:
        FileNotFoundError: *filepath* does not exist or is not a file.
        ImportError: the file cannot be loaded as a Python module.
    """
    path = Path(filepath).resolve()
    if not path.is_file():
        raise FileNotFoundError(f"fields file not found: {filepath}")

    # import the file as a temporary module
    spec = importlib.util.spec_from_file_location("_flatdir_fields", str(path))
    if spec is None or spec.loader is None:
        raise ImportError(f"cannot load fields file: {filepath}")
    module = importlib.util.module_from_spec(spec)
    # register before exec so introspection inside the file works
    sys.modules["_flatdir_fields"] = module
    spec.loader.exec_module(module)

    fields: dict[str, FieldFunc] = {}
    for member_name, obj in inspect.getmembers(module, callable):
        if member_name.startswith("_"):
            continue  # private helpers are not fields
        if inspect.isclass(obj):
            continue  # classes are not field providers
        # only keep callables actually defined in this file (drops imports)
        if getattr(obj, "__module__", None) != "_flatdir_fields":
            continue
        fields[member_name] = obj

    return fields
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: flatdir
|
|
3
|
+
Version: 0.0.6
|
|
4
|
+
Summary: A Python library to create a flat JSON index of files and directories.
|
|
5
|
+
Author: Your Name
|
|
6
|
+
License-Expression: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/romsson/flatdir
|
|
8
|
+
Project-URL: Repository, https://github.com/romsson/flatdir
|
|
9
|
+
Classifier: Natural Language :: English
|
|
10
|
+
Classifier: Programming Language :: Python :: 3
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
16
|
+
Classifier: Typing :: Typed
|
|
17
|
+
Requires-Python: <4,>=3.10
|
|
18
|
+
Description-Content-Type: text/markdown
|
|
19
|
+
License-File: LICENSE
|
|
20
|
+
Dynamic: license-file
|
|
21
|
+
|
|
22
|
+
# flatdir
|
|
29
|
+
|
|
30
|
+
`flatdir` scans a directory tree and generates a flat JSON file with metadata for each entry.
|
|
31
|
+
|
|
32
|
+
## Installation
|
|
33
|
+
|
|
34
|
+
```bash
|
|
35
|
+
pip install -e .
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
Or from PyPI:
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
pip install flatdir
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
## Usage
|
|
45
|
+
|
|
46
|
+
```bash
|
|
47
|
+
python -m flatdir .
|
|
48
|
+
```
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
LICENSE
|
|
2
|
+
README.md
|
|
3
|
+
pyproject.toml
|
|
4
|
+
docs/index.md
|
|
5
|
+
src/flatdir/__init__.py
|
|
6
|
+
src/flatdir/__main__.py
|
|
7
|
+
src/flatdir/listing.py
|
|
8
|
+
src/flatdir/plugins_loader.py
|
|
9
|
+
src/flatdir.egg-info/PKG-INFO
|
|
10
|
+
src/flatdir.egg-info/SOURCES.txt
|
|
11
|
+
src/flatdir.egg-info/dependency_links.txt
|
|
12
|
+
src/flatdir.egg-info/top_level.txt
|
|
13
|
+
src/flatdir/plugins/__init__.py
|
|
14
|
+
src/flatdir/plugins/defaults.py
|
|
15
|
+
src/flatdir/plugins/filename_length.py
|
|
16
|
+
tests/test_depth.py
|
|
17
|
+
tests/test_fields.py
|
|
18
|
+
tests/test_json_output.py
|
|
19
|
+
tests/test_limit.py
|
|
20
|
+
tests/test_output.py
|
|
21
|
+
tests/test_version.py
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
flatdir
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
|
|
3
|
+
from flatdir import listing
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def test_list_entries_with_depth(tmp_path: Path):
    """Test that list_entries respects the depth parameter."""
    # layout: file0.txt at the root, sub1/file1.txt, sub1/sub2/file2.txt
    (tmp_path / "file0.txt").write_text("depth 0")
    sub1 = tmp_path / "sub1"
    sub1.mkdir()
    (sub1 / "file1.txt").write_text("depth 1")
    sub2 = sub1 / "sub2"
    sub2.mkdir()
    (sub2 / "file2.txt").write_text("depth 2")

    # depth=0: only the root's direct children (file0.txt and sub1/)
    depth0 = listing.list_entries(tmp_path, depth=0)
    names0 = [entry["name"] for entry in depth0]
    assert len(depth0) == 2
    assert "file0.txt" in names0
    assert "sub1" in names0

    # depth=1 adds sub1's children: file1.txt and sub2/
    depth1 = listing.list_entries(tmp_path, depth=1)
    assert len(depth1) == 4
    names1 = {entry["name"] for entry in depth1}
    assert "file0.txt" in names1
    assert any("file1.txt" in n for n in names1)

    # depth=2 reaches the deepest file as well (5 entries total)
    assert len(listing.list_entries(tmp_path, depth=2)) == 5

    # depth=None imposes no limit
    assert len(listing.list_entries(tmp_path, depth=None)) == 5
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
"""Tests for the --fields plugin system."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
from flatdir.__main__ import main
|
|
7
|
+
from flatdir.listing import list_entries
|
|
8
|
+
from flatdir.plugins_loader import load_fields_file
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
# -- plugin loader tests --
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def test_load_fields_file_extracts_public_functions(tmp_path: Path):
    """Public functions in the fields file become field providers."""
    plugin = tmp_path / "my_fields.py"
    plugin.write_text(
        "from pathlib import Path\n"
        "def ext(path: Path, root: Path) -> str:\n"
        "    return path.suffix\n"
        "def _private(path: Path, root: Path) -> str:\n"
        "    return 'hidden'\n"
    )
    loaded = load_fields_file(str(plugin))
    # public names are kept, underscore-prefixed ones are dropped
    assert "ext" in loaded
    assert "_private" not in loaded
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def test_load_fields_file_ignores_classes(tmp_path: Path):
    """Classes should not be picked up as field providers."""
    plugin = tmp_path / "my_fields.py"
    plugin.write_text(
        "from pathlib import Path\n"
        "class Foo:\n"
        "    pass\n"
        "def ext(path: Path, root: Path) -> str:\n"
        "    return path.suffix\n"
    )
    loaded = load_fields_file(str(plugin))
    assert "ext" in loaded
    assert "Foo" not in loaded
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def test_load_fields_file_not_found():
    """A missing fields file should raise FileNotFoundError."""
    raised = False
    try:
        load_fields_file("/nonexistent/fields.py")
    except FileNotFoundError:
        raised = True
    assert raised, "should have raised"
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
# -- listing integration tests --
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def test_list_entries_with_custom_fields(tmp_path: Path):
    """Custom fields should appear in each entry."""
    (tmp_path / "doc.txt").write_text("hello")
    (tmp_path / "img.png").write_bytes(b"\x89PNG")

    extra = {
        "ext": lambda p, root: p.suffix,
        "stem": lambda p, root: p.stem,
    }
    entries = list_entries(tmp_path, fields=extra)
    assert len(entries) == 2
    # every entry carries both custom fields
    assert all("ext" in entry and "stem" in entry for entry in entries)

    txt = next(entry for entry in entries if entry["name"] == "doc.txt")
    assert txt["ext"] == ".txt"
    assert txt["stem"] == "doc"
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def test_list_entries_without_fields_unchanged(tmp_path: Path):
    """Without fields, entries should have only the built-in keys."""
    (tmp_path / "a.txt").write_text("a")
    entries = list_entries(tmp_path)
    built_in = {"name", "type", "mtime", "size"}
    assert set(entries[0]) == built_in
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
# -- CLI integration tests --
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def test_cli_fields_flag(tmp_path: Path, capsys):
    """--fields FILE should add custom fields to the JSON output."""
    # a fields plugin providing a single "ext" field
    plugin = tmp_path / "my_fields.py"
    plugin.write_text(
        "from pathlib import Path\n"
        "def ext(path: Path, root: Path) -> str:\n"
        "    return path.suffix\n"
    )

    # a directory with one markdown file to scan
    scan_dir = tmp_path / "data"
    scan_dir.mkdir()
    (scan_dir / "notes.md").write_text("# Notes")

    assert main(["--fields", str(plugin), str(scan_dir)]) == 0

    payload = json.loads(capsys.readouterr().out)
    assert len(payload) == 1
    assert payload[0]["ext"] == ".md"
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def test_cli_fields_combined_with_output(tmp_path: Path):
    """--fields + --output should write enriched JSON to file."""
    plugin = tmp_path / "my_fields.py"
    plugin.write_text(
        "from pathlib import Path\n"
        "def ext(path: Path, root: Path) -> str:\n"
        "    return path.suffix\n"
    )

    scan_dir = tmp_path / "data"
    scan_dir.mkdir()
    (scan_dir / "a.py").write_text("pass")

    target = tmp_path / "result.json"
    args = ["--fields", str(plugin), "--output", str(target), str(scan_dir)]
    assert main(args) == 0

    payload = json.loads(target.read_text(encoding="utf-8"))
    assert payload[0]["ext"] == ".py"
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def test_cli_fields_missing_argument(capsys):
    """--fields without a file path should return error code 1."""
    assert main(["--fields"]) == 1
    assert "error" in capsys.readouterr().err.lower()
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def test_cli_fields_nonexistent_file(capsys):
    """--fields with a nonexistent file should return error code 1."""
    assert main(["--fields", "/nonexistent/fields.py", "."]) == 1
    assert "not found" in capsys.readouterr().err.lower()
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
|
|
3
|
+
from flatdir import listing
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def test_outputs_valid_json(tmp_path: Path):
    """A file created under a subdirectory shows up in the listing."""
    sub = tmp_path / "sub"
    sub.mkdir()
    target = sub / "a.txt"
    _ = target.write_text("this is a test")

    # the "name" field of some entry must point at our file
    names = [str(entry.get("name", "")) for entry in listing.list_entries(tmp_path)]
    assert any(n.endswith("a.txt") for n in names)
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
|
|
3
|
+
from flatdir import listing
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def test_list_entries_with_limit(tmp_path: Path):
    """Test that list_entries respects the limit parameter."""
    # a flat directory with 5 files
    for i in range(5):
        (tmp_path / f"file{i}.txt").write_text(f"content {i}")

    assert len(listing.list_entries(tmp_path, limit=2)) == 2     # capped
    assert len(listing.list_entries(tmp_path, limit=None)) == 5  # no cap
    assert len(listing.list_entries(tmp_path, limit=-1)) == 5    # negative => all
    assert len(listing.list_entries(tmp_path, limit=0)) == 0     # zero => empty
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
"""Tests for the --output flag."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
from flatdir.__main__ import main
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def test_output_writes_json_to_file(tmp_path: Path):
    """--output FILE should write JSON to the given file instead of stdout."""
    (tmp_path / "hello.txt").write_text("hello")

    target = tmp_path / "result.json"
    assert main(["--output", str(target), str(tmp_path)]) == 0

    assert target.exists()
    payload = json.loads(target.read_text(encoding="utf-8"))
    assert isinstance(payload, list)
    assert len(payload) == 1
    assert payload[0]["name"] == "hello.txt"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def test_output_creates_file_with_multiple_entries(tmp_path: Path):
    """--output should write all entries to the file."""
    for i in range(3):
        (tmp_path / f"file{i}.txt").write_text(f"content {i}")

    target = tmp_path / "out.json"
    assert main(["--output", str(target), str(tmp_path)]) == 0
    assert len(json.loads(target.read_text(encoding="utf-8"))) == 3
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def test_output_combined_with_limit(tmp_path: Path):
    """--output combined with --limit should write only limited entries."""
    for i in range(5):
        (tmp_path / f"file{i}.txt").write_text(f"content {i}")

    target = tmp_path / "limited.json"
    assert main(["--limit", "2", "--output", str(target), str(tmp_path)]) == 0
    assert len(json.loads(target.read_text(encoding="utf-8"))) == 2
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def test_output_combined_with_depth(tmp_path: Path):
    """--output combined with --depth should write only depth-limited entries."""
    (tmp_path / "root.txt").write_text("root")
    nested = tmp_path / "sub"
    nested.mkdir()
    (nested / "deep.txt").write_text("deep")

    target = tmp_path / "shallow.json"
    assert main(["--depth", "0", "--output", str(target), str(tmp_path)]) == 0

    payload = json.loads(target.read_text(encoding="utf-8"))
    # depth 0 lists only root.txt and sub/ (deep.txt is below the cut-off)
    assert len(payload) == 2
    assert any(entry["name"] == "root.txt" for entry in payload)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def test_output_missing_argument(capsys):
    """--output without a file path should print an error and return 1."""
    assert main(["--output"]) == 1
    assert "error" in capsys.readouterr().err.lower()
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def test_no_output_flag_prints_to_stdout(tmp_path: Path, capsys):
    """Without --output, JSON should be printed to stdout."""
    (tmp_path / "a.txt").write_text("a")
    assert main([str(tmp_path)]) == 0

    payload = json.loads(capsys.readouterr().out)
    assert isinstance(payload, list)
    assert len(payload) == 1
|