path-sync 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,250 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ import subprocess
5
+ from dataclasses import dataclass, field
6
+ from enum import StrEnum
7
+ from pathlib import Path
8
+
9
+ import typer
10
+ from git import Repo
11
+
12
+ from path_sync._internal import git_ops
13
+ from path_sync._internal.models import Destination, find_repo_root
14
+ from path_sync._internal.models_dep import (
15
+ DepConfig,
16
+ OnFailStrategy,
17
+ UpdateEntry,
18
+ VerifyConfig,
19
+ resolve_dep_config_path,
20
+ )
21
+ from path_sync._internal.typer_app import app
22
+ from path_sync._internal.yaml_utils import load_yaml_model
23
+
24
logger = logging.getLogger(__name__)

# Maximum number of trailing stderr lines kept when a failed step is reported
# (see _truncate_stderr); keeps log output and PR bodies bounded.
MAX_STDERR_LINES = 20
27
+
28
+
29
class Status(StrEnum):
    """Outcome of processing one destination repository."""

    PASSED = "passed"  # updates committed and all verify steps succeeded
    SKIPPED = "skipped"  # an update/verify step failed with SKIP; no PR is created
    WARN = "warn"  # verify steps failed with WARN; PR carries failure details
    NO_CHANGES = "no_changes"  # update commands produced no diff; result dropped
    FAILED = "failed"  # a verify step failed with FAIL; the whole run aborts
35
+
36
+
37
@dataclass
class StepFailure:
    """A single failed command, captured for logging and the PR body."""

    # Command string that failed.
    step: str
    # Process exit code.
    returncode: int
    # Captured stderr, already truncated to MAX_STDERR_LINES.
    stderr: str
    # Strategy that was applied when this failure occurred.
    on_fail: OnFailStrategy

    @classmethod
    def from_error(cls, step: str, e: subprocess.CalledProcessError, on_fail: OnFailStrategy) -> StepFailure:
        """Build a StepFailure from a CalledProcessError, truncating long stderr."""
        stderr = _truncate_stderr(e.stderr or "", MAX_STDERR_LINES)
        return cls(step=step, returncode=e.returncode, stderr=stderr, on_fail=on_fail)
48
+
49
+
50
@dataclass
class RepoResult:
    """Outcome of processing one destination repository."""

    # Destination this result belongs to.
    dest: Destination
    # Local checkout where the work happened.
    repo_path: Path
    # Overall status (see Status for the meaning of each value).
    status: Status
    # Failures collected from update/verify steps, if any.
    failures: list[StepFailure] = field(default_factory=list)
56
+
57
+
58
+ def _truncate_stderr(text: str, max_lines: int) -> str:
59
+ lines = text.strip().splitlines()
60
+ if len(lines) <= max_lines:
61
+ return text.strip()
62
+ skipped = len(lines) - max_lines
63
+ return f"... ({skipped} lines skipped)\n" + "\n".join(lines[-max_lines:])
64
+
65
+
66
@app.command()
def dep_update(
    name: str = typer.Option(..., "-n", "--name", help="Config name"),
    dest_filter: str = typer.Option("", "-d", "--dest", help="Filter destinations (comma-separated)"),
    work_dir: str = typer.Option("", "--work-dir", help="Directory for cloning repos"),
    dry_run: bool = typer.Option(False, "--dry-run", help="Preview without creating PRs"),
    skip_verify: bool = typer.Option(False, "--skip-verify", help="Skip verification steps"),
    src_root_opt: str = typer.Option("", "--src-root", help="Source repo root"),
) -> None:
    """Run dependency updates across repositories.

    Loads the named dep config, runs its update commands in every destination
    repo, verifies the result, and creates/updates PRs. Exits with code 1 when
    the config file is missing or when any destination ended up SKIPPED.
    """
    src_root = Path(src_root_opt) if src_root_opt else find_repo_root(Path.cwd())
    config_path = resolve_dep_config_path(src_root, name)
    if not config_path.exists():
        logger.error(f"Config not found: {config_path}")
        raise typer.Exit(1)

    config = load_yaml_model(config_path, DepConfig)
    destinations = config.load_destinations(src_root)

    # Optional comma-separated destination filter, e.g. "-d repo-a,repo-b".
    if dest_filter:
        filter_names = [n.strip() for n in dest_filter.split(",")]
        destinations = [d for d in destinations if d.name in filter_names]

    results = _update_and_validate(config, destinations, src_root, work_dir, skip_verify)
    _create_prs(config, results, dry_run)

    # Surface skipped repos as a non-zero exit so callers/CI notice partial runs.
    if any(r.status == Status.SKIPPED for r in results):
        raise typer.Exit(1)
94
+
95
+
96
def _update_and_validate(
    config: DepConfig,
    destinations: list[Destination],
    src_root: Path,
    work_dir: str,
    skip_verify: bool,
) -> list[RepoResult]:
    """Process each destination in order; abort on FAILED, drop NO_CHANGES results."""
    collected: list[RepoResult] = []

    for dest in destinations:
        outcome = _process_single_repo(config, dest, src_root, work_dir, skip_verify)

        # A FAIL-strategy verification failure stops the whole run immediately.
        if outcome.status == Status.FAILED:
            logger.error(f"{dest.name}: Verification failed, stopping")
            raise typer.Exit(1)

        if outcome.status == Status.NO_CHANGES:
            continue
        collected.append(outcome)

    return collected
116
+
117
+
118
def _process_single_repo(
    config: DepConfig,
    dest: Destination,
    src_root: Path,
    work_dir: str,
    skip_verify: bool,
) -> RepoResult:
    """Run updates (and optionally verification) for one destination repo."""
    logger.info(f"Processing {dest.name}...")
    repo_path = _resolve_repo_path(dest, src_root, work_dir)
    repo = _ensure_repo(dest, repo_path)
    # Always start the copy branch fresh from origin's default branch.
    git_ops.prepare_copy_branch(repo, dest.default_branch, config.pr.branch, from_default=True)

    failure = _run_updates(config.updates, repo_path)
    if failure is not None:
        logger.warning(f"{dest.name}: Update failed with exit code {failure.returncode}")
        return RepoResult(dest, repo_path, Status.SKIPPED, failures=[failure])

    if not git_ops.has_changes(repo):
        logger.info(f"{dest.name}: No changes, skipping")
        return RepoResult(dest, repo_path, Status.NO_CHANGES)

    git_ops.commit_changes(repo, config.pr.title)

    if skip_verify:
        return RepoResult(dest, repo_path, Status.PASSED)
    return _verify_repo(repo, repo_path, config.verify, dest)
144
+
145
+
146
def _run_updates(updates: list[UpdateEntry], repo_path: Path) -> StepFailure | None:
    """Run each update command in order, stopping at the first failure.

    Returns a StepFailure describing the failed command, or None when all succeed.
    """
    for entry in updates:
        try:
            _run_command(entry.command, repo_path / entry.workdir)
        except subprocess.CalledProcessError as exc:
            # Updates that fail always use the SKIP strategy.
            return StepFailure.from_error(exc.cmd, exc, OnFailStrategy.SKIP)
    return None
154
+
155
+
156
def _verify_repo(repo: Repo, repo_path: Path, verify: VerifyConfig, dest: Destination) -> RepoResult:
    """Execute the verification steps and wrap the outcome in a RepoResult."""
    outcome, step_failures = _run_verify_steps(repo, repo_path, verify)
    return RepoResult(dest, repo_path, outcome, step_failures)
159
+
160
+
161
def _create_prs(config: DepConfig, results: list[RepoResult], dry_run: bool) -> None:
    """Push branches and open/update a PR for every non-skipped result."""
    for item in results:
        # Skipped repos carry a failure and never get a PR.
        if item.status == Status.SKIPPED:
            continue
        if dry_run:
            logger.info(f"[DRY RUN] Would create PR for {item.dest.name}")
            continue

        repo = git_ops.get_repo(item.repo_path)
        git_ops.push_branch(repo, config.pr.branch, force=True)

        pr_body = _build_pr_body(item.failures)
        git_ops.create_or_update_pr(
            item.repo_path,
            config.pr.branch,
            config.pr.title,
            pr_body,
            config.pr.labels or None,
            auto_merge=config.pr.auto_merge,
        )
        logger.info(f"{item.dest.name}: PR created/updated")
183
+
184
+
185
def _resolve_repo_path(dest: Destination, src_root: Path, work_dir: str) -> Path:
    """Locate the working copy for *dest*: relative to src_root, else under work_dir."""
    if dest.dest_path_relative:
        return (src_root / dest.dest_path_relative).resolve()
    if work_dir:
        return Path(work_dir) / dest.name
    raise typer.BadParameter(f"No dest_path_relative for {dest.name}, --work-dir required")
191
+
192
+
193
def _ensure_repo(dest: Destination, repo_path: Path) -> Repo:
    """Return the repo at *repo_path*, cloning from dest.repo_url when absent.

    Args:
        dest: Destination describing the repo (name, optional repo_url).
        repo_path: Expected location of the local checkout.

    Raises:
        ValueError: when the path is missing/not a repo and no repo_url is configured.
    """
    # Reuse an existing checkout when one is already present.
    if repo_path.exists() and git_ops.is_git_repo(repo_path):
        return git_ops.get_repo(repo_path)
    if not dest.repo_url:
        raise ValueError(f"Dest {dest.name} not found at {repo_path} and no repo_url configured")
    return git_ops.clone_repo(dest.repo_url, repo_path)
199
+
200
+
201
def _run_command(cmd: str, cwd: Path) -> None:
    """Run *cmd* in *cwd*; raise CalledProcessError (with captured stderr) on failure."""
    logger.info(f"Running: {cmd}")
    # NOTE(review): shell=True executes the config-provided command string verbatim;
    # acceptable only if update/verify commands come from trusted repo config — confirm.
    result = subprocess.run(cmd, shell=True, cwd=cwd, capture_output=True, text=True)
    if result.returncode != 0:
        stderr = result.stderr.strip()
        logger.error(f"Command failed '{cmd}' in {cwd}: {stderr}")
        # Re-raise as CalledProcessError so callers get exit code + output in one object.
        raise subprocess.CalledProcessError(result.returncode, cmd, output=result.stdout, stderr=stderr)
208
+
209
+
210
def _run_verify_steps(
    repo: Repo,
    repo_path: Path,
    verify: VerifyConfig,
) -> tuple[Status, list[StepFailure]]:
    """Run verification steps, applying each step's on_fail strategy.

    FAIL/SKIP stop immediately with a single failure; WARN records the failure
    and keeps going. A step's optional commit only happens when the step passed.
    """
    warnings: list[StepFailure] = []

    for step in verify.steps:
        # Per-step strategy overrides the config-wide default.
        strategy = step.on_fail or verify.on_fail

        try:
            _run_command(step.run, repo_path)
        except subprocess.CalledProcessError as exc:
            problem = StepFailure.from_error(step.run, exc, strategy)
            if strategy == OnFailStrategy.FAIL:
                return (Status.FAILED, [problem])
            if strategy == OnFailStrategy.SKIP:
                return (Status.SKIPPED, [problem])
            if strategy == OnFailStrategy.WARN:
                warnings.append(problem)
                continue

        if step.commit:
            git_ops.stage_and_commit(repo, step.commit.add_paths, step.commit.message)

    if warnings:
        return (Status.WARN, warnings)
    return (Status.PASSED, warnings)
237
+
238
+
239
+ def _build_pr_body(failures: list[StepFailure]) -> str:
240
+ body = "Automated dependency update."
241
+ if not failures:
242
+ return body
243
+
244
+ body += "\n\n---\n## Verification Issues\n"
245
+ for f in failures:
246
+ body += f"\n### `{f.step}` (strategy: {f.on_fail})\n"
247
+ body += f"**Exit code:** {f.returncode}\n"
248
+ if f.stderr:
249
+ body += f"\n```\n{f.stderr}\n```\n"
250
+ return body
@@ -0,0 +1,50 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ from pathlib import Path
5
+
6
+ import typer
7
+
8
+ from path_sync._internal import git_ops
9
+ from path_sync._internal.models import find_repo_root
10
+ from path_sync._internal.typer_app import app
11
+ from path_sync._internal.validation import parse_skip_sections, validate_no_unauthorized_changes
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
@app.command("validate-no-changes")
def validate_no_changes(
    branch: str = typer.Option("main", "-b", "--branch", help="Default branch to compare against"),
    skip_sections_opt: str = typer.Option(
        "",
        "--skip-sections",
        help="Comma-separated path:section_id pairs to skip (e.g., 'justfile:coverage,pyproject.toml:default')",
    ),
    src_root_opt: str = typer.Option(
        "",
        "--src-root",
        help="Source repo root (default: find git root from cwd)",
    ),
) -> None:
    """Validate no unauthorized changes to synced files.

    Compares the current branch against *branch* and exits with code 1 when
    synced files were modified. Validation is a no-op on sync/* branches and on
    the default branch itself.
    """
    repo_root = Path(src_root_opt) if src_root_opt else find_repo_root(Path.cwd())
    repo = git_ops.get_repo(repo_root)

    # Sync branches are allowed to change synced files; the default branch has
    # nothing meaningful to diff against.
    current_branch = repo.active_branch.name
    if current_branch.startswith("sync/"):
        logger.info(f"On sync branch {current_branch}, validation skipped")
        return
    if current_branch == branch:
        logger.info(f"On default branch {branch}, validation skipped")
        return

    skip_sections = parse_skip_sections(skip_sections_opt) if skip_sections_opt else None
    unauthorized = validate_no_unauthorized_changes(repo_root, branch, skip_sections)

    if unauthorized:
        files_list = "\n ".join(unauthorized)
        logger.error(f"Unauthorized changes in {len(unauthorized)} files:\n {files_list}")
        raise typer.Exit(1)

    logger.info("Validation passed: no unauthorized changes")
@@ -0,0 +1,7 @@
1
+ from pathlib import Path
2
+
3
+
4
+ def ensure_parents_write_text(path: Path | str, text: str) -> None:
5
+ path = Path(path)
6
+ path.parent.mkdir(parents=True, exist_ok=True)
7
+ path.write_text(text)
@@ -0,0 +1,213 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ import os
5
+ import subprocess
6
+ from contextlib import suppress
7
+ from pathlib import Path
8
+
9
+ from git import GitCommandError, InvalidGitRepositoryError, NoSuchPathError, Repo
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
+ def _auth_url(url: str) -> str:
15
+ """Inject GH_TOKEN into HTTPS URL for authentication."""
16
+ token = os.environ.get("GH_TOKEN", "")
17
+ if not token:
18
+ return url
19
+ if url.startswith("https://github.com/"):
20
+ return url.replace("https://", f"https://x-access-token:{token}@")
21
+ return url
22
+
23
+
24
def get_repo(path: Path) -> Repo:
    """Open the git repository at *path*; raise ValueError when it is not one."""
    try:
        return Repo(path)
    except InvalidGitRepositoryError as err:
        # Normalize to ValueError so callers don't need gitpython exception types.
        raise ValueError(f"Not a git repository: {path}") from err
29
+
30
+
31
def is_git_repo(path: Path) -> bool:
    """True when *path* can be opened as a git repository."""
    try:
        Repo(path)
    except (InvalidGitRepositoryError, NoSuchPathError):
        return False
    return True
36
+
37
+
38
def get_default_branch(repo: Repo) -> str:
    """Resolve origin's HEAD branch name, falling back to "main" when unknown."""
    try:
        head_ref = repo.git.symbolic_ref("refs/remotes/origin/HEAD", short=True)
    except GitCommandError:
        return "main"
    return head_ref.replace("origin/", "")
42
+
43
+
44
def clone_repo(url: str, dest: Path) -> Repo:
    """Clone *url* into *dest* (parents created), using GH_TOKEN auth when set."""
    logger.info(f"Cloning {url} to {dest}")
    dest.parent.mkdir(parents=True, exist_ok=True)
    authenticated = _auth_url(url)
    return Repo.clone_from(authenticated, str(dest))
48
+
49
+
50
def checkout_branch(repo: Repo, branch: str) -> None:
    """Switch to *branch*, creating it from the current HEAD if checkout fails.

    NOTE(review): repo.active_branch raises on a detached HEAD — presumably
    callers always have a branch checked out; confirm.
    """
    current = repo.active_branch.name
    if current == branch:
        return
    logger.info(f"Checking out {branch}")
    try:
        repo.git.checkout(branch)
    except GitCommandError:
        # Checkout failed — assumed to mean the branch does not exist yet.
        repo.git.checkout("-b", branch)
59
+
60
+
61
def prepare_copy_branch(repo: Repo, default_branch: str, copy_branch: str, from_default: bool = False) -> None:
    """Prepare copy_branch for syncing.

    Args:
        repo: Repository to operate on.
        default_branch: Branch that origin's tip is taken from.
        copy_branch: Branch the sync work will happen on.
        from_default: If True, fetch origin and reset to origin/default_branch
            before creating copy_branch. Use in CI for clean state.
            If False, just switch to or create copy_branch from current HEAD.
    """
    if from_default:
        logger.info("Fetching origin")
        repo.git.fetch("origin")

        logger.info(f"Checking out {default_branch}")
        repo.git.checkout(default_branch)
        # Hard reset discards any local drift so the copy branch starts
        # exactly from the remote tip.
        repo.git.reset("--hard", f"origin/{default_branch}")

        # Delete any stale local copy branch; suppress covers the common case
        # where it does not exist yet.
        with suppress(GitCommandError):
            repo.git.branch("-D", copy_branch)
            logger.info(f"Deleted existing local branch: {copy_branch}")

        logger.info(f"Creating fresh branch: {copy_branch}")
        repo.git.checkout("-b", copy_branch)
    else:
        checkout_branch(repo, copy_branch)
85
+
86
+
87
def get_current_sha(repo: Repo) -> str:
    """Return the full hex SHA of the current HEAD commit."""
    head_commit = repo.head.commit
    return head_commit.hexsha
89
+
90
+
91
def get_remote_url(repo: Repo, remote_name: str = "origin") -> str:
    """URL of *remote_name*, or the empty string when the remote does not exist."""
    try:
        remote = repo.remote(remote_name)
    except ValueError:
        return ""
    return remote.url
96
+
97
+
98
def has_changes(repo: Repo) -> bool:
    """True when the worktree is dirty or there are untracked files."""
    if repo.is_dirty():
        return True
    return bool(repo.untracked_files)
100
+
101
+
102
def commit_changes(repo: Repo, message: str) -> None:
    """Stage everything and commit with *message*; no-op when nothing changed."""
    repo.git.add("-A")
    if not repo.is_dirty():
        return
    _ensure_git_user(repo)
    repo.git.commit("-m", message)
    logger.info(f"Committed: {message}")
108
+
109
+
110
def stage_and_commit(repo: Repo, add_paths: list[str], message: str) -> bool:
    """Stage *add_paths* (a leading "!" excludes) and commit; True if a commit was made."""
    to_add = [p for p in add_paths if not p.startswith("!")]
    to_unstage = [p.removeprefix("!") for p in add_paths if p.startswith("!")]

    for spec in to_add:
        repo.git.add(spec)
    # Excluded paths are unstaged again so they never end up in the commit.
    for spec in to_unstage:
        repo.git.reset("HEAD", "--", spec)

    if not repo.is_dirty(index=True):
        return False
    _ensure_git_user(repo)
    repo.git.commit("-m", message)
    logger.info(f"Committed: {message}")
    return True
124
+
125
+
126
def _ensure_git_user(repo: Repo) -> None:
    """Configure a bot git identity if user.name is not already set."""
    try:
        repo.config_reader().get_value("user", "name")
    except Exception:
        # NOTE(review): deliberately broad — any failure to read user.name
        # (missing key/section, unreadable config) falls back to the bot
        # identity; consider narrowing to the specific config errors.
        repo.config_writer().set_value("user", "name", "path-sync[bot]").release()
        repo.config_writer().set_value("user", "email", "path-sync[bot]@users.noreply.github.com").release()
133
+
134
+
135
def push_branch(repo: Repo, branch: str, force: bool = True) -> None:
    """Push *branch* to origin with upstream tracking; force-push by default."""
    suffix = " (force)" if force else ""
    logger.info(f"Pushing {branch}{suffix}")
    push_args: list[str] = ["-u", "origin", branch]
    if force:
        push_args.insert(0, "--force")
    repo.git.push(*push_args)
139
+
140
+
141
def update_pr_body(repo_path: Path, branch: str, body: str) -> bool:
    """Edit the PR for *branch* via gh; returns False (with a warning) on failure."""
    proc = subprocess.run(
        ["gh", "pr", "edit", branch, "--body", body],
        cwd=repo_path,
        capture_output=True,
        text=True,
    )
    if proc.returncode == 0:
        logger.info("Updated PR body")
        return True
    logger.warning(f"Failed to update PR body: {proc.stderr}")
    return False
149
+
150
+
151
def create_or_update_pr(
    repo_path: Path,
    branch: str,
    title: str,
    body: str = "",
    labels: list[str] | None = None,
    reviewers: list[str] | None = None,
    assignees: list[str] | None = None,
    auto_merge: bool = False,
) -> str:
    """Create a PR for *branch* via gh, or update the body of an existing one.

    Returns:
        The new PR URL, or "" when an existing PR was updated instead.

    Raises:
        RuntimeError: when `gh pr create` fails for any reason other than the
            PR already existing.
    """
    cmd = ["gh", "pr", "create", "--head", branch, "--title", title]
    cmd.extend(["--body", body or ""])
    if labels:
        cmd.extend(["--label", ",".join(labels)])
    if reviewers:
        cmd.extend(["--reviewer", ",".join(reviewers)])
    if assignees:
        cmd.extend(["--assignee", ",".join(assignees)])

    result = subprocess.run(cmd, cwd=repo_path, capture_output=True, text=True)
    if result.returncode != 0:
        # gh reports an existing PR as an error; treat that case as "update".
        if "already exists" in result.stderr:
            logger.info("PR already exists, updating body")
            update_pr_body(repo_path, branch, body)
            if auto_merge:
                _enable_auto_merge(repo_path, branch)
            return ""
        raise RuntimeError(f"Failed to create PR: {result.stderr}")
    pr_url = result.stdout.strip()
    logger.info(f"Created PR: {pr_url}")
    if auto_merge:
        _enable_auto_merge(repo_path, pr_url)
    return pr_url
184
+
185
+
186
def _enable_auto_merge(repo_path: Path, pr_ref: str) -> None:
    """Ask gh to auto-squash-merge *pr_ref*; failures only log a warning."""
    proc = subprocess.run(
        ["gh", "pr", "merge", "--auto", "--squash", pr_ref],
        cwd=repo_path,
        capture_output=True,
        text=True,
    )
    if proc.returncode == 0:
        logger.info(f"Enabled auto-merge for {pr_ref}")
    else:
        logger.warning(f"Auto-merge failed (branch protection may not be configured): {proc.stderr}")
193
+
194
+
195
def file_has_git_changes(repo: Repo, file_path: Path, base_ref: str = "HEAD") -> bool:
    """True when *file_path* differs from *base_ref* according to `git diff`."""
    # git pathspecs use forward slashes; str() would yield backslashes on
    # Windows and silently match nothing, so use as_posix().
    rel_path = file_path.relative_to(repo.working_dir).as_posix()
    diff = repo.git.diff("--name-only", base_ref, "--", rel_path)
    return bool(diff.strip())
199
+
200
+
201
def get_changed_files(repo: Repo, base_ref: str = "HEAD") -> list[Path]:
    """Absolute paths of files that differ from *base_ref*."""
    names = repo.git.diff("--name-only", base_ref).strip()
    if not names:
        return []
    root = Path(repo.working_dir)
    return [root / name for name in names.split("\n")]
207
+
208
+
209
def get_file_content_at_ref(repo: Repo, file_path: Path, ref: str) -> str | None:
    """Content of *file_path* at *ref*, or None when it doesn't exist there.

    Any GitCommandError (unknown ref, path absent at ref) yields None.
    """
    # git's <ref>:<path> syntax requires forward slashes even on Windows,
    # so use as_posix() instead of str().
    rel_path = file_path.relative_to(repo.working_dir).as_posix()
    with suppress(GitCommandError):
        return repo.git.show(f"{ref}:{rel_path}")
    return None
@@ -0,0 +1,83 @@
1
+ from __future__ import annotations
2
+
3
+ import re
4
+ from pathlib import Path
5
+
6
+ from zero_3rdparty.sections import CommentConfig, get_comment_config
7
+
8
+ from path_sync._internal.models import HEADER_TEMPLATE, HeaderConfig
9
+
10
# Matches the header marker "path-sync copy -n <config_name>" and captures the name.
HEADER_PATTERN = re.compile(r"path-sync copy -n (?P<config_name>[\w-]+)")
11
+
12
+
13
def _resolve_comment_config(path: Path, config: HeaderConfig | None) -> CommentConfig:
    """Comment style for *path*: per-extension overrides first, then library defaults."""
    if config:
        extension = path.suffix
        override_prefix = config.comment_prefixes.get(extension)
        if override_prefix:
            override_suffix = config.comment_suffixes.get(extension, "")
            return CommentConfig(override_prefix, override_suffix)
    # No override configured for this extension — fall back to the defaults.
    return get_comment_config(path)
21
+
22
+
23
def get_comment_prefix(path: Path, config: HeaderConfig | None = None) -> str:
    """Comment prefix used for files like *path*."""
    cc = _resolve_comment_config(path, config)
    return cc.prefix
25
+
26
+
27
def get_comment_suffix(path: Path, config: HeaderConfig | None = None) -> str:
    """Comment suffix used for files like *path* (often empty)."""
    cc = _resolve_comment_config(path, config)
    return cc.suffix
29
+
30
+
31
def get_header_line(path: Path, config_name: str, config: HeaderConfig | None = None) -> str:
    """Full header comment line for *path* naming *config_name*."""
    comment = _resolve_comment_config(path, config)
    marker = HEADER_TEMPLATE.format(config_name=config_name)
    return f"{comment.prefix} {marker}{comment.suffix}"
35
+
36
+
37
def has_header(content: str) -> bool:
    """True when the first line of *content* carries a path-sync header marker."""
    if not content:
        return False
    first_line = content.split("\n", 1)[0]
    return HEADER_PATTERN.search(first_line) is not None
40
+
41
+
42
def get_config_name(content: str) -> str | None:
    """Config name from the header on *content*'s first line, or None."""
    first_line = content.split("\n", 1)[0] if content else ""
    match = HEADER_PATTERN.search(first_line)
    return match.group("config_name") if match else None
47
+
48
+
49
def add_header(content: str, path: Path, config_name: str, config: HeaderConfig | None = None) -> str:
    """Prepend the path-sync header line for *path*/*config_name* to *content*."""
    header_line = get_header_line(path, config_name, config)
    return header_line + "\n" + content
52
+
53
+
54
def remove_header(content: str) -> str:
    """Drop the header line from *content*; unchanged when no header is present."""
    if not has_header(content):
        return content
    _, sep, remainder = content.partition("\n")
    # A header-only file (no newline) reduces to the empty string.
    return remainder if sep else ""
59
+
60
+
61
def has_known_comment_prefix(path: Path) -> bool:
    """True when a default comment style is known for *path*'s file type."""
    try:
        get_comment_config(path)
    except ValueError:
        return False
    return True
67
+
68
+
69
def file_get_config_name(path: Path) -> str | None:
    """Config name from the header of the file at *path*, or None.

    None when the file is missing, has no known comment style, or cannot be
    read as text.
    """
    if not path.exists():
        return None
    if not has_known_comment_prefix(path):
        return None
    try:
        with path.open() as handle:
            first_line = handle.readline()
    except (UnicodeDecodeError, OSError):
        # Binary or unreadable files simply carry no header.
        return None
    return get_config_name(first_line)
78
+
79
+
80
def file_has_header(path: Path, config: HeaderConfig | None = None) -> bool:
    """True when the file at *path* starts with a path-sync header.

    With a HeaderConfig, only extensions listed in comment_prefixes qualify.
    """
    if config and path.suffix not in config.comment_prefixes:
        return False
    return file_get_config_name(path) is not None