ossplate 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -0
- package/bin/darwin-arm64/ossplate +0 -0
- package/bin/darwin-x64/ossplate +3 -0
- package/bin/linux-x64/ossplate +3 -0
- package/bin/ossplate.js +5 -0
- package/bin/win32-x64/ossplate.exe +3 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.js +56 -0
- package/package.json +43 -0
- package/scaffold/.github/workflows/ci.yml +76 -0
- package/scaffold/.github/workflows/publish-npm.yml +45 -0
- package/scaffold/.github/workflows/publish.yml +97 -0
- package/scaffold/.gitignore +9 -0
- package/scaffold/CONTRIBUTING.md +30 -0
- package/scaffold/LICENSE +9 -0
- package/scaffold/README.md +73 -0
- package/scaffold/core-rs/Cargo.lock +338 -0
- package/scaffold/core-rs/Cargo.toml +28 -0
- package/scaffold/core-rs/src/main.rs +1609 -0
- package/scaffold/docs/README.md +21 -0
- package/scaffold/docs/customizing-the-template.md +144 -0
- package/scaffold/docs/phase-1-contract.md +52 -0
- package/scaffold/docs/testing.md +71 -0
- package/scaffold/docs/upgrade-plan.md +251 -0
- package/scaffold/ossplate.toml +15 -0
- package/scaffold/scripts/verify.sh +36 -0
- package/scaffold/wrapper-js/README.md +7 -0
- package/scaffold/wrapper-js/bin/darwin-arm64/ossplate +0 -0
- package/scaffold/wrapper-js/bin/darwin-x64/ossplate +3 -0
- package/scaffold/wrapper-js/bin/linux-x64/ossplate +3 -0
- package/scaffold/wrapper-js/bin/ossplate.js +5 -0
- package/scaffold/wrapper-js/bin/win32-x64/ossplate.exe +3 -0
- package/scaffold/wrapper-js/package-lock.json +51 -0
- package/scaffold/wrapper-js/package.json +43 -0
- package/scaffold/wrapper-js/src/index.ts +69 -0
- package/scaffold/wrapper-js/tsconfig.json +14 -0
- package/scaffold/wrapper-py/README.md +7 -0
- package/scaffold/wrapper-py/hatch_build.py +16 -0
- package/scaffold/wrapper-py/pyproject.toml +37 -0
- package/scaffold/wrapper-py/src/ossplate/__init__.py +3 -0
- package/scaffold/wrapper-py/src/ossplate/bin/darwin-arm64/ossplate +0 -0
- package/scaffold/wrapper-py/src/ossplate/bin/darwin-x64/ossplate +3 -0
- package/scaffold/wrapper-py/src/ossplate/bin/linux-x64/ossplate +3 -0
- package/scaffold/wrapper-py/src/ossplate/bin/win32-x64/ossplate.exe +3 -0
- package/scaffold/wrapper-py/src/ossplate/cli.py +55 -0
|
@@ -0,0 +1,1609 @@
|
|
|
1
|
+
use anyhow::{anyhow, bail, Context, Result};
|
|
2
|
+
use clap::{Args, Parser, Subcommand};
|
|
3
|
+
use serde::{Deserialize, Serialize};
|
|
4
|
+
use serde_json::json;
|
|
5
|
+
use std::collections::BTreeMap;
|
|
6
|
+
use std::fs;
|
|
7
|
+
use std::path::{Path, PathBuf};
|
|
8
|
+
use toml::Value as TomlValue;
|
|
9
|
+
|
|
10
|
+
// Sentinel comments delimiting the README section whose content ossplate owns.
const README_IDENTITY_START: &str = "<!-- ossplate:readme-identity:start -->";
const README_IDENTITY_END: &str = "<!-- ossplate:readme-identity:end -->";
// Sentinel comments delimiting the workflow-name region ossplate owns in CI YAML.
const WORKFLOW_NAME_START: &str = "# ossplate:workflow-name:start";
const WORKFLOW_NAME_END: &str = "# ossplate:workflow-name:end";
|
|
14
|
+
|
|
15
|
+
// Top-level CLI definition parsed by clap's derive API.
// NOTE: plain `//` comments are used here (not `///`) so clap's generated
// --help output is unchanged by this documentation pass.
#[derive(Parser)]
#[command(name = "ossplate")]
#[command(
    author,
    version,
    about = "Validate and sync a multi-registry OSS scaffold"
)]
struct Cli {
    // The selected subcommand; see `Commands` for the available verbs.
    #[command(subcommand)]
    command: Commands,
}
|
|
26
|
+
|
|
27
|
+
// CLI subcommands. The existing `///` doc comments are user-visible: clap
// turns them into per-subcommand help text, so they are kept byte-identical.
// New annotations below use `//` so help output does not change.
#[derive(Subcommand)]
enum Commands {
    /// Print tool version information
    Version,
    /// Scaffold a new target directory from the current template
    Create {
        // Destination directory; `create_scaffold_from` requires it to be
        // empty (or absent, in which case it is created).
        target: PathBuf,
        #[command(flatten)]
        overrides: IdentityOverrides,
    },
    /// Initialize or hydrate an existing directory in place
    Init {
        #[arg(long, default_value = ".")]
        path: PathBuf,
        #[command(flatten)]
        overrides: IdentityOverrides,
    },
    /// Validate project identity and metadata consistency
    Validate {
        #[arg(long, default_value = ".")]
        path: PathBuf,
        // Emit the machine-readable JSON report instead of human text (see `run`).
        #[arg(long)]
        json: bool,
    },
    /// Synchronize owned metadata surfaces
    Sync {
        #[arg(long, default_value = ".")]
        path: PathBuf,
        // Dry-run: report drift and fail instead of writing (see `sync_repo`).
        #[arg(long)]
        check: bool,
    },
}
|
|
59
|
+
|
|
60
|
+
// Optional identity overrides shared by `create` and `init` (flattened into
// both subcommands). Each flag, when present, replaces the corresponding
// field of the loaded ossplate.toml config before it is written to the
// target — see `apply_overrides` for the exact field mapping.
// NOTE: `//` comments (not `///`) so clap's generated --help is unchanged.
#[derive(Debug, Clone, Default, Args)]
struct IdentityOverrides {
    // -> project.name
    #[arg(long)]
    name: Option<String>,
    // -> project.description
    #[arg(long)]
    description: Option<String>,
    // -> project.repository
    #[arg(long)]
    repository: Option<String>,
    // -> project.license
    #[arg(long)]
    license: Option<String>,
    // -> author.name
    #[arg(long = "author-name")]
    author_name: Option<String>,
    // -> author.email
    #[arg(long = "author-email")]
    author_email: Option<String>,
    // -> packages.rust_crate
    #[arg(long = "rust-crate")]
    rust_crate: Option<String>,
    // -> packages.npm_package
    #[arg(long = "npm-package")]
    npm_package: Option<String>,
    // -> packages.python_package
    #[arg(long = "python-package")]
    python_package: Option<String>,
    // -> packages.command
    #[arg(long)]
    command: Option<String>,
}
|
|
83
|
+
|
|
84
|
+
/// Canonical project identity, (de)serialized from `ossplate.toml` at the
/// scaffold root. This file is the single source of truth that every managed
/// metadata surface is validated against and synced from.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct ToolConfig {
    /// `[project]` table: name, description, repository URL, license.
    project: ProjectConfig,
    /// `[author]` table: author name and email.
    author: AuthorConfig,
    /// `[packages]` table: per-registry package names and the CLI command name.
    packages: PackageConfig,
}
|
|
90
|
+
|
|
91
|
+
/// `[project]` table of `ossplate.toml`.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct ProjectConfig {
    /// Human-facing project name.
    name: String,
    /// One-line project description, propagated to all package manifests.
    description: String,
    /// Repository URL; also used as the Cargo `homepage` and the pyproject
    /// Homepage/Repository URLs (see the validate/sync functions).
    repository: String,
    /// SPDX-style license string propagated to all manifests.
    license: String,
}
|
|
98
|
+
|
|
99
|
+
/// `[author]` table of `ossplate.toml`; rendered as "Name <email>" in the
/// Cargo and npm manifests, and as separate name/email in pyproject authors.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct AuthorConfig {
    name: String,
    email: String,
}
|
|
104
|
+
|
|
105
|
+
/// `[packages]` table of `ossplate.toml`: the per-registry package names.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct PackageConfig {
    /// Cargo crate name (core-rs/Cargo.toml `package.name`).
    rust_crate: String,
    /// npm package name (wrapper-js/package.json `name`).
    npm_package: String,
    /// PyPI project name (wrapper-py/pyproject.toml `project.name`).
    python_package: String,
    /// CLI command name; used as the key of the npm `bin` map.
    command: String,
}
|
|
112
|
+
|
|
113
|
+
/// JSON payload printed by the `version` subcommand
/// (tool/version come from Cargo build-time env vars; see `run`).
#[derive(Debug, Clone, Serialize)]
struct VersionOutput {
    tool: &'static str,
    version: &'static str,
}
|
|
118
|
+
|
|
119
|
+
/// Result of `validate_repo`; serialized as-is for `validate --json`.
#[derive(Debug, Clone, Serialize)]
struct ValidationOutput {
    /// True iff `issues` is empty.
    ok: bool,
    issues: Vec<ValidationIssue>,
}
|
|
124
|
+
|
|
125
|
+
/// One detected mismatch between a managed file and the canonical identity.
// NOTE(review): instances are built by the `issue(...)` helper and
// `check_*_field` helpers, which are defined outside this view; the field
// mapping below is inferred from those call sites — confirm against `issue`.
#[derive(Debug, Clone, Serialize, PartialEq, Eq)]
struct ValidationIssue {
    // Appears to identify the drifted field (e.g. "package.name").
    code: String,
    // Repo-relative path of the file the issue was found in.
    file: String,
    message: String,
    /// Canonical value derived from ossplate.toml, when applicable.
    expected: Option<String>,
    /// Value actually found in the file, when applicable.
    actual: Option<String>,
}
|
|
133
|
+
|
|
134
|
+
/// A managed file whose on-disk content drifted, plus its regenerated content.
/// Produced by `build_sync_changes` and applied (or reported) by `sync_repo`.
#[derive(Debug, Clone)]
struct SyncChange {
    /// Repo-relative path of the drifted file.
    path: &'static str,
    /// The validation issues that triggered the rewrite.
    issues: Vec<ValidationIssue>,
    /// The fully synchronized replacement content.
    synced: String,
}
|
|
140
|
+
|
|
141
|
+
/// One metadata surface ossplate owns: a path plus its validate/sync hooks.
/// The full table lives in `managed_files`.
#[derive(Debug, Clone)]
struct ManagedFile {
    /// Repo-relative path; also the lookup key into `collect_current_files`.
    path: &'static str,
    /// Returns the drift issues for the current content (empty = in sync).
    validate: fn(&ToolConfig, &str) -> Result<Vec<ValidationIssue>>,
    /// Produces fully synchronized content from the current content.
    sync: fn(&ToolConfig, &str) -> Result<String>,
}
|
|
147
|
+
|
|
148
|
+
fn main() {
|
|
149
|
+
if let Err(error) = run() {
|
|
150
|
+
eprintln!("ossplate: {error}");
|
|
151
|
+
std::process::exit(1);
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
fn run() -> Result<()> {
|
|
156
|
+
let cli = Cli::parse();
|
|
157
|
+
match cli.command {
|
|
158
|
+
Commands::Version => {
|
|
159
|
+
println!(
|
|
160
|
+
"{}",
|
|
161
|
+
serde_json::to_string(&VersionOutput {
|
|
162
|
+
tool: env!("CARGO_BIN_NAME"),
|
|
163
|
+
version: env!("CARGO_PKG_VERSION"),
|
|
164
|
+
})?
|
|
165
|
+
);
|
|
166
|
+
Ok(())
|
|
167
|
+
}
|
|
168
|
+
Commands::Create { target, overrides } => create_scaffold(&target, &overrides),
|
|
169
|
+
Commands::Init { path, overrides } => init_scaffold(&path, &overrides),
|
|
170
|
+
Commands::Validate { path, json } => {
|
|
171
|
+
let output = validate_repo(&path)?;
|
|
172
|
+
if json {
|
|
173
|
+
println!("{}", serde_json::to_string(&output)?);
|
|
174
|
+
} else if output.ok {
|
|
175
|
+
println!("validation ok");
|
|
176
|
+
} else {
|
|
177
|
+
println!(
|
|
178
|
+
"{}",
|
|
179
|
+
format_human_issues("validation failed:", &output.issues)
|
|
180
|
+
);
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
if output.ok {
|
|
184
|
+
Ok(())
|
|
185
|
+
} else {
|
|
186
|
+
bail!("validation failed")
|
|
187
|
+
}
|
|
188
|
+
}
|
|
189
|
+
Commands::Sync { path, check } => sync_repo(&path, check),
|
|
190
|
+
}
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
fn create_scaffold(target: &Path, overrides: &IdentityOverrides) -> Result<()> {
|
|
194
|
+
let source_root = discover_template_root()?;
|
|
195
|
+
ensure_scaffold_source_root(&source_root)?;
|
|
196
|
+
create_scaffold_from(&source_root, target, overrides)
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
fn init_scaffold(target: &Path, overrides: &IdentityOverrides) -> Result<()> {
|
|
200
|
+
let source_root = discover_template_root()?;
|
|
201
|
+
ensure_scaffold_source_root(&source_root)?;
|
|
202
|
+
init_scaffold_from(&source_root, target, overrides)
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
fn create_scaffold_from(
|
|
206
|
+
source_root: &Path,
|
|
207
|
+
target: &Path,
|
|
208
|
+
overrides: &IdentityOverrides,
|
|
209
|
+
) -> Result<()> {
|
|
210
|
+
if target.exists() {
|
|
211
|
+
if target.read_dir()?.next().is_some() {
|
|
212
|
+
bail!("target directory is not empty: {}", target.display());
|
|
213
|
+
}
|
|
214
|
+
} else {
|
|
215
|
+
fs::create_dir_all(target)
|
|
216
|
+
.with_context(|| format!("failed to create {}", target.display()))?;
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
let source_root = source_root.canonicalize().with_context(|| {
|
|
220
|
+
format!(
|
|
221
|
+
"failed to canonicalize source root {}",
|
|
222
|
+
source_root.display()
|
|
223
|
+
)
|
|
224
|
+
})?;
|
|
225
|
+
let target_root = target
|
|
226
|
+
.canonicalize()
|
|
227
|
+
.with_context(|| format!("failed to canonicalize target root {}", target.display()))?;
|
|
228
|
+
if target_root.starts_with(&source_root) {
|
|
229
|
+
bail!("target directory must not be inside the source template tree");
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
copy_tree(&source_root, &target_root)?;
|
|
233
|
+
apply_config_overrides_to_target(&target_root, &source_root, overrides)?;
|
|
234
|
+
sync_repo(&target_root, false)?;
|
|
235
|
+
println!("created scaffold at {}", target_root.display());
|
|
236
|
+
Ok(())
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
fn init_scaffold_from(
|
|
240
|
+
source_root: &Path,
|
|
241
|
+
target: &Path,
|
|
242
|
+
overrides: &IdentityOverrides,
|
|
243
|
+
) -> Result<()> {
|
|
244
|
+
if !target.exists() {
|
|
245
|
+
fs::create_dir_all(target)
|
|
246
|
+
.with_context(|| format!("failed to create {}", target.display()))?;
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
let source_root = source_root.canonicalize().with_context(|| {
|
|
250
|
+
format!(
|
|
251
|
+
"failed to canonicalize source root {}",
|
|
252
|
+
source_root.display()
|
|
253
|
+
)
|
|
254
|
+
})?;
|
|
255
|
+
let target_root = target
|
|
256
|
+
.canonicalize()
|
|
257
|
+
.with_context(|| format!("failed to canonicalize target root {}", target.display()))?;
|
|
258
|
+
if target_root.starts_with(&source_root) {
|
|
259
|
+
bail!("target directory must not be inside the source template tree");
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
ensure_scaffold_layout(&source_root, &target_root)?;
|
|
263
|
+
apply_config_overrides_to_target(&target_root, &source_root, overrides)?;
|
|
264
|
+
sync_repo(&target_root, false)?;
|
|
265
|
+
println!("initialized scaffold at {}", target_root.display());
|
|
266
|
+
Ok(())
|
|
267
|
+
}
|
|
268
|
+
|
|
269
|
+
fn validate_repo(root: &Path) -> Result<ValidationOutput> {
|
|
270
|
+
let config = load_config(root)?;
|
|
271
|
+
let current = collect_current_files(root)?;
|
|
272
|
+
let mut issues = Vec::new();
|
|
273
|
+
|
|
274
|
+
for file in &managed_files() {
|
|
275
|
+
let actual = current
|
|
276
|
+
.get(file.path)
|
|
277
|
+
.ok_or_else(|| anyhow!("missing owned file {}", file.path))?;
|
|
278
|
+
issues.extend((file.validate)(&config, actual)?);
|
|
279
|
+
}
|
|
280
|
+
|
|
281
|
+
let output = ValidationOutput {
|
|
282
|
+
ok: issues.is_empty(),
|
|
283
|
+
issues,
|
|
284
|
+
};
|
|
285
|
+
Ok(output)
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
fn sync_repo(root: &Path, check: bool) -> Result<()> {
|
|
289
|
+
let drifted = build_sync_changes(root)?;
|
|
290
|
+
|
|
291
|
+
if check {
|
|
292
|
+
if drifted.is_empty() {
|
|
293
|
+
println!("sync check ok");
|
|
294
|
+
return Ok(());
|
|
295
|
+
}
|
|
296
|
+
let issues = drifted
|
|
297
|
+
.iter()
|
|
298
|
+
.flat_map(|change| change.issues.iter().cloned())
|
|
299
|
+
.collect::<Vec<_>>();
|
|
300
|
+
println!("{}", format_human_issues("sync check failed:", &issues));
|
|
301
|
+
bail!("sync check failed")
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
for change in drifted {
|
|
305
|
+
let target = root.join(change.path);
|
|
306
|
+
fs::write(&target, change.synced)
|
|
307
|
+
.with_context(|| format!("failed to write {}", target.display()))?;
|
|
308
|
+
}
|
|
309
|
+
|
|
310
|
+
println!("sync complete");
|
|
311
|
+
Ok(())
|
|
312
|
+
}
|
|
313
|
+
|
|
314
|
+
fn load_config(root: &Path) -> Result<ToolConfig> {
|
|
315
|
+
let contents =
|
|
316
|
+
fs::read_to_string(root.join("ossplate.toml")).context("failed to read ossplate.toml")?;
|
|
317
|
+
toml::from_str(&contents).context("failed to parse ossplate.toml")
|
|
318
|
+
}
|
|
319
|
+
|
|
320
|
+
fn write_config(root: &Path, config: &ToolConfig) -> Result<()> {
|
|
321
|
+
let mut rendered = toml::to_string(config).context("failed to serialize ossplate.toml")?;
|
|
322
|
+
if !rendered.ends_with('\n') {
|
|
323
|
+
rendered.push('\n');
|
|
324
|
+
}
|
|
325
|
+
fs::write(root.join("ossplate.toml"), rendered).context("failed to write ossplate.toml")?;
|
|
326
|
+
Ok(())
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
fn apply_config_overrides_to_target(
|
|
330
|
+
target_root: &Path,
|
|
331
|
+
source_root: &Path,
|
|
332
|
+
overrides: &IdentityOverrides,
|
|
333
|
+
) -> Result<()> {
|
|
334
|
+
let mut config = if target_root.join("ossplate.toml").is_file() {
|
|
335
|
+
load_config(target_root)?
|
|
336
|
+
} else {
|
|
337
|
+
load_config(source_root)?
|
|
338
|
+
};
|
|
339
|
+
|
|
340
|
+
apply_overrides(&mut config, overrides);
|
|
341
|
+
write_config(target_root, &config)
|
|
342
|
+
}
|
|
343
|
+
|
|
344
|
+
/// Overlay every CLI-provided override onto `config`; absent flags leave the
/// corresponding field untouched. Driven by a (source, destination) table so
/// the flag-to-field mapping is visible in one place.
fn apply_overrides(config: &mut ToolConfig, overrides: &IdentityOverrides) {
    let mappings: [(&Option<String>, &mut String); 10] = [
        (&overrides.name, &mut config.project.name),
        (&overrides.description, &mut config.project.description),
        (&overrides.repository, &mut config.project.repository),
        (&overrides.license, &mut config.project.license),
        (&overrides.author_name, &mut config.author.name),
        (&overrides.author_email, &mut config.author.email),
        (&overrides.rust_crate, &mut config.packages.rust_crate),
        (&overrides.npm_package, &mut config.packages.npm_package),
        (&overrides.python_package, &mut config.packages.python_package),
        (&overrides.command, &mut config.packages.command),
    ];
    for (source, destination) in mappings {
        if let Some(value) = source {
            *destination = value.clone();
        }
    }
}
|
|
376
|
+
|
|
377
|
+
fn build_sync_changes(root: &Path) -> Result<Vec<SyncChange>> {
|
|
378
|
+
let config = load_config(root)?;
|
|
379
|
+
let current = collect_current_files(root)?;
|
|
380
|
+
let mut drifted = Vec::new();
|
|
381
|
+
|
|
382
|
+
for file in &managed_files() {
|
|
383
|
+
let actual = current
|
|
384
|
+
.get(file.path)
|
|
385
|
+
.ok_or_else(|| anyhow!("missing owned file {}", file.path))?;
|
|
386
|
+
let issues = (file.validate)(&config, actual)?;
|
|
387
|
+
if !issues.is_empty() {
|
|
388
|
+
drifted.push(SyncChange {
|
|
389
|
+
path: file.path,
|
|
390
|
+
synced: (file.sync)(&config, actual)?,
|
|
391
|
+
issues,
|
|
392
|
+
});
|
|
393
|
+
}
|
|
394
|
+
}
|
|
395
|
+
|
|
396
|
+
Ok(drifted)
|
|
397
|
+
}
|
|
398
|
+
|
|
399
|
+
fn collect_current_files(root: &Path) -> Result<BTreeMap<&'static str, String>> {
|
|
400
|
+
let mut files = BTreeMap::new();
|
|
401
|
+
for path in owned_paths() {
|
|
402
|
+
files.insert(
|
|
403
|
+
path,
|
|
404
|
+
fs::read_to_string(root.join(path))
|
|
405
|
+
.with_context(|| format!("failed to read {}", root.join(path).display()))?,
|
|
406
|
+
);
|
|
407
|
+
}
|
|
408
|
+
Ok(files)
|
|
409
|
+
}
|
|
410
|
+
|
|
411
|
+
fn discover_template_root() -> Result<PathBuf> {
|
|
412
|
+
if let Some(explicit) = std::env::var_os("OSSPLATE_TEMPLATE_ROOT") {
|
|
413
|
+
let explicit = PathBuf::from(explicit);
|
|
414
|
+
if explicit.join("ossplate.toml").is_file() {
|
|
415
|
+
return Ok(explicit);
|
|
416
|
+
}
|
|
417
|
+
bail!("OSSPLATE_TEMPLATE_ROOT does not point to a scaffold root containing ossplate.toml");
|
|
418
|
+
}
|
|
419
|
+
|
|
420
|
+
let exe = std::env::current_exe().context("failed to determine current executable path")?;
|
|
421
|
+
for ancestor in exe.ancestors() {
|
|
422
|
+
if ancestor.join("ossplate.toml").is_file() {
|
|
423
|
+
return Ok(ancestor.to_path_buf());
|
|
424
|
+
}
|
|
425
|
+
}
|
|
426
|
+
std::env::current_dir()
|
|
427
|
+
.context("failed to determine current directory")?
|
|
428
|
+
.ancestors()
|
|
429
|
+
.find(|ancestor| ancestor.join("ossplate.toml").is_file())
|
|
430
|
+
.map(Path::to_path_buf)
|
|
431
|
+
.ok_or_else(|| anyhow!("failed to locate template root containing ossplate.toml"))
|
|
432
|
+
}
|
|
433
|
+
|
|
434
|
+
fn ensure_scaffold_source_root(root: &Path) -> Result<()> {
|
|
435
|
+
let required = [
|
|
436
|
+
"ossplate.toml",
|
|
437
|
+
"README.md",
|
|
438
|
+
"core-rs/Cargo.toml",
|
|
439
|
+
"wrapper-js/package.json",
|
|
440
|
+
"wrapper-py/pyproject.toml",
|
|
441
|
+
];
|
|
442
|
+
|
|
443
|
+
let missing: Vec<_> = required
|
|
444
|
+
.iter()
|
|
445
|
+
.filter(|path| !root.join(path).exists())
|
|
446
|
+
.copied()
|
|
447
|
+
.collect();
|
|
448
|
+
|
|
449
|
+
if missing.is_empty() {
|
|
450
|
+
return Ok(());
|
|
451
|
+
}
|
|
452
|
+
|
|
453
|
+
bail!(
|
|
454
|
+
"create/init require a full scaffold source checkout; missing required scaffold paths: {}",
|
|
455
|
+
missing.join(", ")
|
|
456
|
+
)
|
|
457
|
+
}
|
|
458
|
+
|
|
459
|
+
fn copy_tree(source_root: &Path, target_root: &Path) -> Result<()> {
|
|
460
|
+
for entry in fs::read_dir(source_root)
|
|
461
|
+
.with_context(|| format!("failed to read {}", source_root.display()))?
|
|
462
|
+
{
|
|
463
|
+
let entry = entry?;
|
|
464
|
+
let source_path = entry.path();
|
|
465
|
+
let file_name = entry.file_name();
|
|
466
|
+
if should_skip_copy(&file_name) {
|
|
467
|
+
continue;
|
|
468
|
+
}
|
|
469
|
+
|
|
470
|
+
let target_path = target_root.join(&file_name);
|
|
471
|
+
let file_type = entry.file_type()?;
|
|
472
|
+
if file_type.is_dir() {
|
|
473
|
+
fs::create_dir_all(&target_path)
|
|
474
|
+
.with_context(|| format!("failed to create {}", target_path.display()))?;
|
|
475
|
+
copy_tree(&source_path, &target_path)?;
|
|
476
|
+
} else if file_type.is_file() {
|
|
477
|
+
fs::copy(&source_path, &target_path).with_context(|| {
|
|
478
|
+
format!(
|
|
479
|
+
"failed to copy {} to {}",
|
|
480
|
+
source_path.display(),
|
|
481
|
+
target_path.display()
|
|
482
|
+
)
|
|
483
|
+
})?;
|
|
484
|
+
}
|
|
485
|
+
}
|
|
486
|
+
Ok(())
|
|
487
|
+
}
|
|
488
|
+
|
|
489
|
+
fn ensure_scaffold_layout(source_root: &Path, target_root: &Path) -> Result<()> {
|
|
490
|
+
for entry in fs::read_dir(source_root)
|
|
491
|
+
.with_context(|| format!("failed to read {}", source_root.display()))?
|
|
492
|
+
{
|
|
493
|
+
let entry = entry?;
|
|
494
|
+
let source_path = entry.path();
|
|
495
|
+
let file_name = entry.file_name();
|
|
496
|
+
if should_skip_copy(&file_name) {
|
|
497
|
+
continue;
|
|
498
|
+
}
|
|
499
|
+
|
|
500
|
+
let target_path = target_root.join(&file_name);
|
|
501
|
+
let file_type = entry.file_type()?;
|
|
502
|
+
if file_type.is_dir() {
|
|
503
|
+
if !target_path.exists() {
|
|
504
|
+
fs::create_dir_all(&target_path)
|
|
505
|
+
.with_context(|| format!("failed to create {}", target_path.display()))?;
|
|
506
|
+
}
|
|
507
|
+
ensure_scaffold_layout(&source_path, &target_path)?;
|
|
508
|
+
} else if !target_path.exists() {
|
|
509
|
+
fs::copy(&source_path, &target_path).with_context(|| {
|
|
510
|
+
format!(
|
|
511
|
+
"failed to copy {} to {}",
|
|
512
|
+
source_path.display(),
|
|
513
|
+
target_path.display()
|
|
514
|
+
)
|
|
515
|
+
})?;
|
|
516
|
+
}
|
|
517
|
+
}
|
|
518
|
+
Ok(())
|
|
519
|
+
}
|
|
520
|
+
|
|
521
|
+
/// True for directory/file names that must never be copied into a scaffold:
/// VCS data, dependency caches, build output, and Python bytecode caches.
/// Names that are not valid UTF-8 are never skipped.
fn should_skip_copy(file_name: &std::ffi::OsStr) -> bool {
    const SKIP: [&str; 7] = [
        ".git",
        ".venv",
        "node_modules",
        "target",
        "dist",
        "build",
        "__pycache__",
    ];
    match file_name.to_str() {
        Some(name) => SKIP.contains(&name),
        None => false,
    }
}
|
|
533
|
+
|
|
534
|
+
/// Repo-relative paths of every file ossplate owns. `collect_current_files`
/// reads exactly this set, and `managed_files` registers a validate/sync pair
/// for each entry — keep the two lists in lockstep when adding a surface.
fn owned_paths() -> [&'static str; 9] {
    [
        "README.md",
        ".github/workflows/ci.yml",
        ".github/workflows/publish.yml",
        ".github/workflows/publish-npm.yml",
        "core-rs/Cargo.toml",
        "wrapper-js/package.json",
        "wrapper-py/pyproject.toml",
        "wrapper-js/README.md",
        "wrapper-py/README.md",
    ]
}
|
|
547
|
+
|
|
548
|
+
/// The registry of managed metadata surfaces: one `ManagedFile` per entry of
/// `owned_paths`, pairing each path with its validate and sync functions.
fn managed_files() -> Vec<ManagedFile> {
    vec![
        // Root README identity block.
        ManagedFile {
            path: "README.md",
            validate: validate_root_readme,
            sync: sync_root_readme,
        },
        // GitHub Actions workflows (workflow-name regions).
        ManagedFile {
            path: ".github/workflows/ci.yml",
            validate: validate_ci_workflow,
            sync: sync_ci_workflow,
        },
        ManagedFile {
            path: ".github/workflows/publish.yml",
            validate: validate_publish_workflow,
            sync: sync_publish_workflow,
        },
        ManagedFile {
            path: ".github/workflows/publish-npm.yml",
            validate: validate_publish_npm_workflow,
            sync: sync_publish_npm_workflow,
        },
        // Per-registry package manifests.
        ManagedFile {
            path: "core-rs/Cargo.toml",
            validate: validate_cargo_toml,
            sync: sync_cargo_toml,
        },
        ManagedFile {
            path: "wrapper-js/package.json",
            validate: validate_package_json,
            sync: sync_package_json,
        },
        ManagedFile {
            path: "wrapper-py/pyproject.toml",
            validate: validate_pyproject,
            sync: sync_pyproject,
        },
        // Wrapper READMEs.
        ManagedFile {
            path: "wrapper-js/README.md",
            validate: validate_js_readme,
            sync: sync_js_readme,
        },
        ManagedFile {
            path: "wrapper-py/README.md",
            validate: validate_py_readme,
            sync: sync_py_readme,
        },
    ]
}
|
|
597
|
+
|
|
598
|
+
/// Check `core-rs/Cargo.toml`'s `[package]` metadata against the canonical
/// identity: name (= packages.rust_crate), description, license, repository,
/// homepage (expected to equal the repository URL), and the first `authors`
/// entry rendered as "Name <email>". Returns one issue per mismatch; only
/// parse failure or a missing `[package]` table is a hard error.
fn validate_cargo_toml(config: &ToolConfig, content: &str) -> Result<Vec<ValidationIssue>> {
    let value: TomlValue = toml::from_str(content).context("failed to parse core-rs/Cargo.toml")?;
    let package = value
        .get("package")
        .and_then(TomlValue::as_table)
        .ok_or_else(|| anyhow!("missing [package] table in core-rs/Cargo.toml"))?;

    let mut issues = Vec::new();
    check_string_field(
        &mut issues,
        "core-rs/Cargo.toml",
        "package.name",
        package.get("name"),
        &config.packages.rust_crate,
    );
    check_string_field(
        &mut issues,
        "core-rs/Cargo.toml",
        "package.description",
        package.get("description"),
        &config.project.description,
    );
    check_string_field(
        &mut issues,
        "core-rs/Cargo.toml",
        "package.license",
        package.get("license"),
        &config.project.license,
    );
    check_string_field(
        &mut issues,
        "core-rs/Cargo.toml",
        "package.repository",
        package.get("repository"),
        &config.project.repository,
    );
    // Homepage is deliberately pinned to the repository URL.
    check_string_field(
        &mut issues,
        "core-rs/Cargo.toml",
        "package.homepage",
        package.get("homepage"),
        &config.project.repository,
    );
    // Only the FIRST authors entry is compared; additional entries are
    // currently ignored by validation.
    let expected_author = format!("{} <{}>", config.author.name, config.author.email);
    let actual_author = package
        .get("authors")
        .and_then(TomlValue::as_array)
        .and_then(|items| items.first())
        .and_then(TomlValue::as_str)
        .unwrap_or_default()
        .to_string();
    if actual_author != expected_author {
        issues.push(issue(
            "core-rs/Cargo.toml",
            "package.authors",
            "owned metadata differs from the canonical project identity",
            Some(expected_author),
            Some(actual_author),
        ));
    }
    Ok(issues)
}
|
|
660
|
+
|
|
661
|
+
fn sync_cargo_toml(config: &ToolConfig, content: &str) -> Result<String> {
|
|
662
|
+
let mut value: TomlValue =
|
|
663
|
+
toml::from_str(content).context("failed to parse core-rs/Cargo.toml")?;
|
|
664
|
+
let package = value
|
|
665
|
+
.get_mut("package")
|
|
666
|
+
.and_then(TomlValue::as_table_mut)
|
|
667
|
+
.ok_or_else(|| anyhow!("missing [package] table in core-rs/Cargo.toml"))?;
|
|
668
|
+
package.insert(
|
|
669
|
+
"name".into(),
|
|
670
|
+
TomlValue::String(config.packages.rust_crate.clone()),
|
|
671
|
+
);
|
|
672
|
+
package.insert(
|
|
673
|
+
"authors".into(),
|
|
674
|
+
TomlValue::Array(vec![TomlValue::String(format!(
|
|
675
|
+
"{} <{}>",
|
|
676
|
+
config.author.name, config.author.email
|
|
677
|
+
))]),
|
|
678
|
+
);
|
|
679
|
+
package.insert(
|
|
680
|
+
"description".into(),
|
|
681
|
+
TomlValue::String(config.project.description.clone()),
|
|
682
|
+
);
|
|
683
|
+
package.insert(
|
|
684
|
+
"license".into(),
|
|
685
|
+
TomlValue::String(config.project.license.clone()),
|
|
686
|
+
);
|
|
687
|
+
package.insert(
|
|
688
|
+
"repository".into(),
|
|
689
|
+
TomlValue::String(config.project.repository.clone()),
|
|
690
|
+
);
|
|
691
|
+
package.insert(
|
|
692
|
+
"homepage".into(),
|
|
693
|
+
TomlValue::String(config.project.repository.clone()),
|
|
694
|
+
);
|
|
695
|
+
Ok(toml::to_string(&value)?)
|
|
696
|
+
}
|
|
697
|
+
|
|
698
|
+
/// Check `wrapper-js/package.json` against the canonical identity: name
/// (= packages.npm_package), description, author ("Name <email>"), license,
/// `repository.url`, and that the `bin` map routes the configured command
/// name to "bin/ossplate.js". One issue per mismatch; only a JSON parse
/// failure is a hard error.
fn validate_package_json(config: &ToolConfig, content: &str) -> Result<Vec<ValidationIssue>> {
    let value: serde_json::Value =
        serde_json::from_str(content).context("failed to parse wrapper-js/package.json")?;
    let mut issues = Vec::new();
    check_json_string(
        &mut issues,
        "wrapper-js/package.json",
        "name",
        value.get("name"),
        &config.packages.npm_package,
    );
    check_json_string(
        &mut issues,
        "wrapper-js/package.json",
        "description",
        value.get("description"),
        &config.project.description,
    );
    check_json_string(
        &mut issues,
        "wrapper-js/package.json",
        "author",
        value.get("author"),
        &format!("{} <{}>", config.author.name, config.author.email),
    );
    check_json_string(
        &mut issues,
        "wrapper-js/package.json",
        "license",
        value.get("license"),
        &config.project.license,
    );
    // `repository` is expected to be the object form with a `url` key;
    // a missing/string-form repository compares as "" and is flagged.
    let repo_url = value
        .get("repository")
        .and_then(|v| v.get("url"))
        .and_then(serde_json::Value::as_str)
        .unwrap_or_default()
        .to_string();
    if repo_url != config.project.repository {
        issues.push(issue(
            "wrapper-js/package.json",
            "repository.url",
            "owned metadata differs from the canonical project identity",
            Some(config.project.repository.clone()),
            Some(repo_url),
        ));
    }
    // The bin entry is keyed by the configured command name; its target is
    // the fixed launcher script path.
    let bin_target = value
        .get("bin")
        .and_then(|v| v.get(&config.packages.command))
        .and_then(serde_json::Value::as_str)
        .unwrap_or_default()
        .to_string();
    if bin_target != "bin/ossplate.js" {
        issues.push(issue(
            "wrapper-js/package.json",
            "bin",
            "owned metadata differs from the canonical project identity",
            Some("bin/ossplate.js".to_string()),
            Some(bin_target),
        ));
    }
    Ok(issues)
}
|
|
762
|
+
|
|
763
|
+
fn sync_package_json(config: &ToolConfig, content: &str) -> Result<String> {
|
|
764
|
+
let mut value: serde_json::Value =
|
|
765
|
+
serde_json::from_str(content).context("failed to parse wrapper-js/package.json")?;
|
|
766
|
+
value["name"] = serde_json::Value::String(config.packages.npm_package.clone());
|
|
767
|
+
value["description"] = serde_json::Value::String(config.project.description.clone());
|
|
768
|
+
value["author"] =
|
|
769
|
+
serde_json::Value::String(format!("{} <{}>", config.author.name, config.author.email));
|
|
770
|
+
value["license"] = serde_json::Value::String(config.project.license.clone());
|
|
771
|
+
value["repository"]["url"] = serde_json::Value::String(config.project.repository.clone());
|
|
772
|
+
value["bin"] = json!({
|
|
773
|
+
config.packages.command.clone(): "bin/ossplate.js"
|
|
774
|
+
});
|
|
775
|
+
let mut rendered = serde_json::to_string_pretty(&value)?;
|
|
776
|
+
rendered.push('\n');
|
|
777
|
+
Ok(rendered)
|
|
778
|
+
}
|
|
779
|
+
|
|
780
|
+
fn validate_pyproject(config: &ToolConfig, content: &str) -> Result<Vec<ValidationIssue>> {
|
|
781
|
+
let value: TomlValue =
|
|
782
|
+
toml::from_str(content).context("failed to parse wrapper-py/pyproject.toml")?;
|
|
783
|
+
let project = value
|
|
784
|
+
.get("project")
|
|
785
|
+
.and_then(TomlValue::as_table)
|
|
786
|
+
.ok_or_else(|| anyhow!("missing [project] table in wrapper-py/pyproject.toml"))?;
|
|
787
|
+
let mut issues = Vec::new();
|
|
788
|
+
check_string_field(
|
|
789
|
+
&mut issues,
|
|
790
|
+
"wrapper-py/pyproject.toml",
|
|
791
|
+
"project.name",
|
|
792
|
+
project.get("name"),
|
|
793
|
+
&config.packages.python_package,
|
|
794
|
+
);
|
|
795
|
+
check_string_field(
|
|
796
|
+
&mut issues,
|
|
797
|
+
"wrapper-py/pyproject.toml",
|
|
798
|
+
"project.description",
|
|
799
|
+
project.get("description"),
|
|
800
|
+
&config.project.description,
|
|
801
|
+
);
|
|
802
|
+
let license = project
|
|
803
|
+
.get("license")
|
|
804
|
+
.and_then(TomlValue::as_table)
|
|
805
|
+
.and_then(|t| t.get("text"));
|
|
806
|
+
check_string_field(
|
|
807
|
+
&mut issues,
|
|
808
|
+
"wrapper-py/pyproject.toml",
|
|
809
|
+
"project.license.text",
|
|
810
|
+
license,
|
|
811
|
+
&config.project.license,
|
|
812
|
+
);
|
|
813
|
+
let author = project
|
|
814
|
+
.get("authors")
|
|
815
|
+
.and_then(TomlValue::as_array)
|
|
816
|
+
.and_then(|items| items.first())
|
|
817
|
+
.and_then(TomlValue::as_table);
|
|
818
|
+
let actual_name = author
|
|
819
|
+
.and_then(|item| item.get("name"))
|
|
820
|
+
.and_then(TomlValue::as_str)
|
|
821
|
+
.unwrap_or_default()
|
|
822
|
+
.to_string();
|
|
823
|
+
if actual_name != config.author.name {
|
|
824
|
+
issues.push(issue(
|
|
825
|
+
"wrapper-py/pyproject.toml",
|
|
826
|
+
"project.authors[0].name",
|
|
827
|
+
"owned metadata differs from the canonical project identity",
|
|
828
|
+
Some(config.author.name.clone()),
|
|
829
|
+
Some(actual_name),
|
|
830
|
+
));
|
|
831
|
+
}
|
|
832
|
+
let actual_email = author
|
|
833
|
+
.and_then(|item| item.get("email"))
|
|
834
|
+
.and_then(TomlValue::as_str)
|
|
835
|
+
.unwrap_or_default()
|
|
836
|
+
.to_string();
|
|
837
|
+
if actual_email != config.author.email {
|
|
838
|
+
issues.push(issue(
|
|
839
|
+
"wrapper-py/pyproject.toml",
|
|
840
|
+
"project.authors[0].email",
|
|
841
|
+
"owned metadata differs from the canonical project identity",
|
|
842
|
+
Some(config.author.email.clone()),
|
|
843
|
+
Some(actual_email),
|
|
844
|
+
));
|
|
845
|
+
}
|
|
846
|
+
let urls = project_urls(&value)?;
|
|
847
|
+
if urls.0 != config.project.repository {
|
|
848
|
+
issues.push(issue(
|
|
849
|
+
"wrapper-py/pyproject.toml",
|
|
850
|
+
"project.urls.Homepage",
|
|
851
|
+
"owned metadata differs from the canonical project identity",
|
|
852
|
+
Some(config.project.repository.clone()),
|
|
853
|
+
Some(urls.0),
|
|
854
|
+
));
|
|
855
|
+
}
|
|
856
|
+
if urls.1 != config.project.repository {
|
|
857
|
+
issues.push(issue(
|
|
858
|
+
"wrapper-py/pyproject.toml",
|
|
859
|
+
"project.urls.Repository",
|
|
860
|
+
"owned metadata differs from the canonical project identity",
|
|
861
|
+
Some(config.project.repository.clone()),
|
|
862
|
+
Some(urls.1),
|
|
863
|
+
));
|
|
864
|
+
}
|
|
865
|
+
let scripts = value
|
|
866
|
+
.get("project")
|
|
867
|
+
.and_then(TomlValue::as_table)
|
|
868
|
+
.and_then(|t| t.get("scripts"))
|
|
869
|
+
.and_then(TomlValue::as_table)
|
|
870
|
+
.ok_or_else(|| anyhow!("missing [project.scripts] in wrapper-py/pyproject.toml"))?;
|
|
871
|
+
let actual_entry = scripts
|
|
872
|
+
.get(&config.packages.command)
|
|
873
|
+
.and_then(TomlValue::as_str)
|
|
874
|
+
.unwrap_or_default()
|
|
875
|
+
.to_string();
|
|
876
|
+
if actual_entry != "ossplate.cli:main" {
|
|
877
|
+
issues.push(issue(
|
|
878
|
+
"wrapper-py/pyproject.toml",
|
|
879
|
+
"project.scripts",
|
|
880
|
+
"owned metadata differs from the canonical project identity",
|
|
881
|
+
Some("ossplate.cli:main".to_string()),
|
|
882
|
+
Some(actual_entry),
|
|
883
|
+
));
|
|
884
|
+
}
|
|
885
|
+
Ok(issues)
|
|
886
|
+
}
|
|
887
|
+
|
|
888
|
+
fn sync_pyproject(config: &ToolConfig, content: &str) -> Result<String> {
|
|
889
|
+
let mut value: TomlValue =
|
|
890
|
+
toml::from_str(content).context("failed to parse wrapper-py/pyproject.toml")?;
|
|
891
|
+
let project = value
|
|
892
|
+
.get_mut("project")
|
|
893
|
+
.and_then(TomlValue::as_table_mut)
|
|
894
|
+
.ok_or_else(|| anyhow!("missing [project] table in wrapper-py/pyproject.toml"))?;
|
|
895
|
+
project.insert(
|
|
896
|
+
"name".into(),
|
|
897
|
+
TomlValue::String(config.packages.python_package.clone()),
|
|
898
|
+
);
|
|
899
|
+
project.insert(
|
|
900
|
+
"description".into(),
|
|
901
|
+
TomlValue::String(config.project.description.clone()),
|
|
902
|
+
);
|
|
903
|
+
let mut license = toml::map::Map::new();
|
|
904
|
+
license.insert(
|
|
905
|
+
"text".into(),
|
|
906
|
+
TomlValue::String(config.project.license.clone()),
|
|
907
|
+
);
|
|
908
|
+
project.insert("license".into(), TomlValue::Table(license));
|
|
909
|
+
let mut author = toml::map::Map::new();
|
|
910
|
+
author.insert("name".into(), TomlValue::String(config.author.name.clone()));
|
|
911
|
+
author.insert(
|
|
912
|
+
"email".into(),
|
|
913
|
+
TomlValue::String(config.author.email.clone()),
|
|
914
|
+
);
|
|
915
|
+
project.insert(
|
|
916
|
+
"authors".into(),
|
|
917
|
+
TomlValue::Array(vec![TomlValue::Table(author)]),
|
|
918
|
+
);
|
|
919
|
+
let mut scripts = toml::map::Map::new();
|
|
920
|
+
scripts.insert(
|
|
921
|
+
config.packages.command.clone(),
|
|
922
|
+
TomlValue::String("ossplate.cli:main".to_string()),
|
|
923
|
+
);
|
|
924
|
+
project.insert("scripts".into(), TomlValue::Table(scripts));
|
|
925
|
+
let urls = value
|
|
926
|
+
.as_table_mut()
|
|
927
|
+
.and_then(|t| t.get_mut("project"))
|
|
928
|
+
.and_then(TomlValue::as_table_mut)
|
|
929
|
+
.and_then(|t| t.get_mut("urls"))
|
|
930
|
+
.and_then(TomlValue::as_table_mut)
|
|
931
|
+
.ok_or_else(|| anyhow!("missing [project.urls] in wrapper-py/pyproject.toml"))?;
|
|
932
|
+
urls.insert(
|
|
933
|
+
"Homepage".into(),
|
|
934
|
+
TomlValue::String(config.project.repository.clone()),
|
|
935
|
+
);
|
|
936
|
+
urls.insert(
|
|
937
|
+
"Repository".into(),
|
|
938
|
+
TomlValue::String(config.project.repository.clone()),
|
|
939
|
+
);
|
|
940
|
+
Ok(toml::to_string(&value)?)
|
|
941
|
+
}
|
|
942
|
+
|
|
943
|
+
fn validate_js_readme(config: &ToolConfig, content: &str) -> Result<Vec<ValidationIssue>> {
|
|
944
|
+
validate_wrapper_readme("wrapper-js/README.md", "JavaScript", config, content)
|
|
945
|
+
}
|
|
946
|
+
|
|
947
|
+
fn sync_js_readme(config: &ToolConfig, _content: &str) -> Result<String> {
|
|
948
|
+
Ok(render_wrapper_readme("JavaScript", config))
|
|
949
|
+
}
|
|
950
|
+
|
|
951
|
+
fn validate_py_readme(config: &ToolConfig, content: &str) -> Result<Vec<ValidationIssue>> {
|
|
952
|
+
validate_wrapper_readme("wrapper-py/README.md", "Python", config, content)
|
|
953
|
+
}
|
|
954
|
+
|
|
955
|
+
fn sync_py_readme(config: &ToolConfig, _content: &str) -> Result<String> {
|
|
956
|
+
Ok(render_wrapper_readme("Python", config))
|
|
957
|
+
}
|
|
958
|
+
|
|
959
|
+
fn validate_root_readme(config: &ToolConfig, content: &str) -> Result<Vec<ValidationIssue>> {
|
|
960
|
+
let expected = render_root_readme_identity(config);
|
|
961
|
+
let actual = extract_marked_section(content, README_IDENTITY_START, README_IDENTITY_END)?;
|
|
962
|
+
if actual == expected {
|
|
963
|
+
Ok(Vec::new())
|
|
964
|
+
} else {
|
|
965
|
+
Ok(vec![issue(
|
|
966
|
+
"README.md",
|
|
967
|
+
"readme.identity",
|
|
968
|
+
"owned metadata differs from the canonical project identity",
|
|
969
|
+
Some(expected),
|
|
970
|
+
Some(actual),
|
|
971
|
+
)])
|
|
972
|
+
}
|
|
973
|
+
}
|
|
974
|
+
|
|
975
|
+
fn sync_root_readme(config: &ToolConfig, content: &str) -> Result<String> {
|
|
976
|
+
replace_marked_section(
|
|
977
|
+
content,
|
|
978
|
+
README_IDENTITY_START,
|
|
979
|
+
README_IDENTITY_END,
|
|
980
|
+
&render_root_readme_identity(config),
|
|
981
|
+
)
|
|
982
|
+
}
|
|
983
|
+
|
|
984
|
+
fn validate_ci_workflow(config: &ToolConfig, content: &str) -> Result<Vec<ValidationIssue>> {
|
|
985
|
+
validate_workflow_name(
|
|
986
|
+
".github/workflows/ci.yml",
|
|
987
|
+
&format!("{} CI", config.project.name),
|
|
988
|
+
content,
|
|
989
|
+
)
|
|
990
|
+
}
|
|
991
|
+
|
|
992
|
+
fn sync_ci_workflow(config: &ToolConfig, content: &str) -> Result<String> {
|
|
993
|
+
sync_workflow_name(content, &format!("{} CI", config.project.name))
|
|
994
|
+
}
|
|
995
|
+
|
|
996
|
+
fn validate_publish_workflow(config: &ToolConfig, content: &str) -> Result<Vec<ValidationIssue>> {
|
|
997
|
+
validate_workflow_name(
|
|
998
|
+
".github/workflows/publish.yml",
|
|
999
|
+
&format!("{} publishing", config.project.name),
|
|
1000
|
+
content,
|
|
1001
|
+
)
|
|
1002
|
+
}
|
|
1003
|
+
|
|
1004
|
+
fn sync_publish_workflow(config: &ToolConfig, content: &str) -> Result<String> {
|
|
1005
|
+
sync_workflow_name(content, &format!("{} publishing", config.project.name))
|
|
1006
|
+
}
|
|
1007
|
+
|
|
1008
|
+
fn validate_publish_npm_workflow(
|
|
1009
|
+
config: &ToolConfig,
|
|
1010
|
+
content: &str,
|
|
1011
|
+
) -> Result<Vec<ValidationIssue>> {
|
|
1012
|
+
validate_workflow_name(
|
|
1013
|
+
".github/workflows/publish-npm.yml",
|
|
1014
|
+
&format!("{} publish-npm", config.project.name),
|
|
1015
|
+
content,
|
|
1016
|
+
)
|
|
1017
|
+
}
|
|
1018
|
+
|
|
1019
|
+
fn sync_publish_npm_workflow(config: &ToolConfig, content: &str) -> Result<String> {
|
|
1020
|
+
sync_workflow_name(content, &format!("{} publish-npm", config.project.name))
|
|
1021
|
+
}
|
|
1022
|
+
|
|
1023
|
+
fn validate_wrapper_readme(
|
|
1024
|
+
path: &str,
|
|
1025
|
+
language: &str,
|
|
1026
|
+
config: &ToolConfig,
|
|
1027
|
+
content: &str,
|
|
1028
|
+
) -> Result<Vec<ValidationIssue>> {
|
|
1029
|
+
let expected = render_wrapper_readme(language, config);
|
|
1030
|
+
if content == expected {
|
|
1031
|
+
Ok(Vec::new())
|
|
1032
|
+
} else {
|
|
1033
|
+
Ok(vec![issue(
|
|
1034
|
+
path,
|
|
1035
|
+
"readme.identity",
|
|
1036
|
+
"owned metadata differs from the canonical project identity",
|
|
1037
|
+
Some(expected),
|
|
1038
|
+
Some(content.to_string()),
|
|
1039
|
+
)])
|
|
1040
|
+
}
|
|
1041
|
+
}
|
|
1042
|
+
|
|
1043
|
+
fn render_wrapper_readme(language: &str, config: &ToolConfig) -> String {
|
|
1044
|
+
format!(
|
|
1045
|
+
r#"# {language} Wrapper For {name}
|
|
1046
|
+
|
|
1047
|
+
This package is the {language} wrapper surface for {name}.
|
|
1048
|
+
|
|
1049
|
+
It delegates to the canonical Rust binary instead of implementing its own CLI behavior.
|
|
1050
|
+
|
|
1051
|
+
Use `OSSPLATE_BINARY` during local development to point the wrapper at a specific binary.
|
|
1052
|
+
"#,
|
|
1053
|
+
language = language,
|
|
1054
|
+
name = config.project.name,
|
|
1055
|
+
)
|
|
1056
|
+
}
|
|
1057
|
+
|
|
1058
|
+
fn render_root_readme_identity(config: &ToolConfig) -> String {
|
|
1059
|
+
format!(
|
|
1060
|
+
"# {}\n\n{}\n",
|
|
1061
|
+
config.project.name, config.project.description
|
|
1062
|
+
)
|
|
1063
|
+
}
|
|
1064
|
+
|
|
1065
|
+
fn validate_workflow_name(
|
|
1066
|
+
path: &str,
|
|
1067
|
+
expected_name: &str,
|
|
1068
|
+
content: &str,
|
|
1069
|
+
) -> Result<Vec<ValidationIssue>> {
|
|
1070
|
+
let expected = format!("name: {}\n", expected_name);
|
|
1071
|
+
let actual = extract_marked_section(content, WORKFLOW_NAME_START, WORKFLOW_NAME_END)?;
|
|
1072
|
+
if actual == expected {
|
|
1073
|
+
Ok(Vec::new())
|
|
1074
|
+
} else {
|
|
1075
|
+
Ok(vec![issue(
|
|
1076
|
+
path,
|
|
1077
|
+
"workflow.name",
|
|
1078
|
+
"owned metadata differs from the canonical project identity",
|
|
1079
|
+
Some(expected),
|
|
1080
|
+
Some(actual),
|
|
1081
|
+
)])
|
|
1082
|
+
}
|
|
1083
|
+
}
|
|
1084
|
+
|
|
1085
|
+
fn sync_workflow_name(content: &str, expected_name: &str) -> Result<String> {
|
|
1086
|
+
replace_marked_section(
|
|
1087
|
+
content,
|
|
1088
|
+
WORKFLOW_NAME_START,
|
|
1089
|
+
WORKFLOW_NAME_END,
|
|
1090
|
+
&format!("name: {}\n", expected_name),
|
|
1091
|
+
)
|
|
1092
|
+
}
|
|
1093
|
+
|
|
1094
|
+
fn issue(
|
|
1095
|
+
file: &str,
|
|
1096
|
+
code: &str,
|
|
1097
|
+
message: &str,
|
|
1098
|
+
expected: Option<String>,
|
|
1099
|
+
actual: Option<String>,
|
|
1100
|
+
) -> ValidationIssue {
|
|
1101
|
+
ValidationIssue {
|
|
1102
|
+
code: code.to_string(),
|
|
1103
|
+
file: file.to_string(),
|
|
1104
|
+
message: message.to_string(),
|
|
1105
|
+
expected,
|
|
1106
|
+
actual,
|
|
1107
|
+
}
|
|
1108
|
+
}
|
|
1109
|
+
|
|
1110
|
+
fn format_human_issues(header: &str, issues: &[ValidationIssue]) -> String {
|
|
1111
|
+
let mut grouped = BTreeMap::<&str, Vec<&ValidationIssue>>::new();
|
|
1112
|
+
for issue in issues {
|
|
1113
|
+
grouped.entry(&issue.file).or_default().push(issue);
|
|
1114
|
+
}
|
|
1115
|
+
|
|
1116
|
+
let mut lines = vec![header.to_string()];
|
|
1117
|
+
for (file, file_issues) in grouped {
|
|
1118
|
+
lines.push(format!("- {}", file));
|
|
1119
|
+
for issue in file_issues {
|
|
1120
|
+
lines.push(format!(" [{}] {}", issue.code, issue.message));
|
|
1121
|
+
if let Some(expected) = &issue.expected {
|
|
1122
|
+
lines.push(format!(" expected: {}", summarize_value(expected)));
|
|
1123
|
+
}
|
|
1124
|
+
if let Some(actual) = &issue.actual {
|
|
1125
|
+
lines.push(format!(" actual: {}", summarize_value(actual)));
|
|
1126
|
+
}
|
|
1127
|
+
}
|
|
1128
|
+
}
|
|
1129
|
+
lines.join("\n")
|
|
1130
|
+
}
|
|
1131
|
+
|
|
1132
|
+
/// Summarizes a possibly-long, possibly-multiline value for one-line
/// human-readable output.
///
/// Whitespace is trimmed, embedded newlines become literal `\n`, and the
/// result is debug-quoted. Values longer than 80 bytes are truncated at a
/// UTF-8 character boundary and suffixed with an ellipsis.
fn summarize_value(value: &str) -> String {
    // Maximum number of bytes shown before truncation kicks in.
    const MAX_LEN: usize = 80;

    let trimmed = value.trim();
    if trimmed.is_empty() {
        return "\"\"".to_string();
    }

    let flattened = trimmed.replace('\n', "\\n");
    if flattened.len() <= MAX_LEN {
        format!("{flattened:?}")
    } else {
        // Bug fix: `&flattened[..80]` panicked whenever byte 80 landed inside
        // a multi-byte UTF-8 sequence. Back up to the nearest char boundary
        // before slicing.
        let mut cut = MAX_LEN;
        while !flattened.is_char_boundary(cut) {
            cut -= 1;
        }
        format!("{:?}…", &flattened[..cut])
    }
}
|
|
1145
|
+
|
|
1146
|
+
fn extract_marked_section(content: &str, start: &str, end: &str) -> Result<String> {
|
|
1147
|
+
let start_index = content
|
|
1148
|
+
.find(start)
|
|
1149
|
+
.ok_or_else(|| anyhow!("missing marker {}", start))?;
|
|
1150
|
+
let section_start = start_index + start.len();
|
|
1151
|
+
let end_index = content[section_start..]
|
|
1152
|
+
.find(end)
|
|
1153
|
+
.map(|index| section_start + index)
|
|
1154
|
+
.ok_or_else(|| anyhow!("missing marker {}", end))?;
|
|
1155
|
+
Ok(content[section_start..end_index]
|
|
1156
|
+
.trim_matches('\n')
|
|
1157
|
+
.to_string()
|
|
1158
|
+
+ "\n")
|
|
1159
|
+
}
|
|
1160
|
+
|
|
1161
|
+
fn replace_marked_section(
|
|
1162
|
+
content: &str,
|
|
1163
|
+
start: &str,
|
|
1164
|
+
end: &str,
|
|
1165
|
+
replacement: &str,
|
|
1166
|
+
) -> Result<String> {
|
|
1167
|
+
let start_index = content
|
|
1168
|
+
.find(start)
|
|
1169
|
+
.ok_or_else(|| anyhow!("missing marker {}", start))?;
|
|
1170
|
+
let section_start = start_index + start.len();
|
|
1171
|
+
let end_index = content[section_start..]
|
|
1172
|
+
.find(end)
|
|
1173
|
+
.map(|index| section_start + index)
|
|
1174
|
+
.ok_or_else(|| anyhow!("missing marker {}", end))?;
|
|
1175
|
+
|
|
1176
|
+
let mut rendered = String::new();
|
|
1177
|
+
rendered.push_str(&content[..section_start]);
|
|
1178
|
+
rendered.push('\n');
|
|
1179
|
+
rendered.push_str(replacement.trim_end());
|
|
1180
|
+
rendered.push('\n');
|
|
1181
|
+
rendered.push_str(&content[end_index..]);
|
|
1182
|
+
Ok(rendered)
|
|
1183
|
+
}
|
|
1184
|
+
|
|
1185
|
+
fn check_string_field(
|
|
1186
|
+
issues: &mut Vec<ValidationIssue>,
|
|
1187
|
+
file: &str,
|
|
1188
|
+
code: &str,
|
|
1189
|
+
value: Option<&TomlValue>,
|
|
1190
|
+
expected: &str,
|
|
1191
|
+
) {
|
|
1192
|
+
let actual = value
|
|
1193
|
+
.and_then(TomlValue::as_str)
|
|
1194
|
+
.unwrap_or_default()
|
|
1195
|
+
.to_string();
|
|
1196
|
+
if actual != expected {
|
|
1197
|
+
issues.push(issue(
|
|
1198
|
+
file,
|
|
1199
|
+
code,
|
|
1200
|
+
"owned metadata differs from the canonical project identity",
|
|
1201
|
+
Some(expected.to_string()),
|
|
1202
|
+
Some(actual),
|
|
1203
|
+
));
|
|
1204
|
+
}
|
|
1205
|
+
}
|
|
1206
|
+
|
|
1207
|
+
fn check_json_string(
|
|
1208
|
+
issues: &mut Vec<ValidationIssue>,
|
|
1209
|
+
file: &str,
|
|
1210
|
+
code: &str,
|
|
1211
|
+
value: Option<&serde_json::Value>,
|
|
1212
|
+
expected: &str,
|
|
1213
|
+
) {
|
|
1214
|
+
let actual = value
|
|
1215
|
+
.and_then(serde_json::Value::as_str)
|
|
1216
|
+
.unwrap_or_default()
|
|
1217
|
+
.to_string();
|
|
1218
|
+
if actual != expected {
|
|
1219
|
+
issues.push(issue(
|
|
1220
|
+
file,
|
|
1221
|
+
code,
|
|
1222
|
+
"owned metadata differs from the canonical project identity",
|
|
1223
|
+
Some(expected.to_string()),
|
|
1224
|
+
Some(actual),
|
|
1225
|
+
));
|
|
1226
|
+
}
|
|
1227
|
+
}
|
|
1228
|
+
|
|
1229
|
+
fn project_urls(value: &TomlValue) -> Result<(String, String)> {
|
|
1230
|
+
let urls = value
|
|
1231
|
+
.as_table()
|
|
1232
|
+
.and_then(|t| t.get("project"))
|
|
1233
|
+
.and_then(TomlValue::as_table)
|
|
1234
|
+
.and_then(|t| t.get("urls"))
|
|
1235
|
+
.and_then(TomlValue::as_table)
|
|
1236
|
+
.ok_or_else(|| anyhow!("missing [project.urls] in wrapper-py/pyproject.toml"))?;
|
|
1237
|
+
Ok((
|
|
1238
|
+
urls.get("Homepage")
|
|
1239
|
+
.and_then(TomlValue::as_str)
|
|
1240
|
+
.unwrap_or_default()
|
|
1241
|
+
.to_string(),
|
|
1242
|
+
urls.get("Repository")
|
|
1243
|
+
.and_then(TomlValue::as_str)
|
|
1244
|
+
.unwrap_or_default()
|
|
1245
|
+
.to_string(),
|
|
1246
|
+
))
|
|
1247
|
+
}
|
|
1248
|
+
|
|
1249
|
+
#[cfg(test)]
mod tests {
    // Unit and integration-style tests. Filesystem tests build a throwaway
    // fixture repository under the system temp directory via
    // `make_fixture_root` and operate on it in place.
    use super::*;
    use std::time::{SystemTime, UNIX_EPOCH};

    // `VersionOutput` must serialize to the exact JSON shape wrappers parse.
    #[test]
    fn version_output_serializes() {
        let json = serde_json::to_string(&VersionOutput {
            tool: "ossplate",
            version: "0.1.0",
        })
        .unwrap();
        assert_eq!(json, r#"{"tool":"ossplate","version":"0.1.0"}"#);
    }

    // A package.json whose "name" diverges from config must fail validation
    // and report an issue attributed to that file.
    #[test]
    fn validate_detects_drift() {
        let root = make_fixture_root();
        // Overwrite the fixture package.json with a drifted "name" field.
        fs::write(
            root.join("wrapper-js/package.json"),
            "{\n \"name\": \"bad\",\n \"description\": \"A practical baseline for shipping one project across Cargo, npm, and PyPI without starting from scratch every time.\",\n \"bin\": { \"ossplate\": \"bin/ossplate.js\" },\n \"author\": \"Stef <stefdevscore@github.com>\",\n \"license\": \"Unlicense\",\n \"repository\": { \"url\": \"https://github.com/stefdevscore/ossplate\" }\n}\n",
        )
        .unwrap();
        let output = validate_repo(&root).unwrap();
        assert!(!output.ok);
        assert!(output
            .issues
            .iter()
            .any(|issue| issue.file == "wrapper-js/package.json"));
    }

    // sync --check must fail on drift, a real sync must repair it, and the
    // repaired tree must then pass both the check and full validation.
    #[test]
    fn sync_check_detects_drift_and_sync_fixes_it() {
        let root = make_fixture_root();
        fs::write(
            root.join("wrapper-js/package.json"),
            "{\n \"name\": \"bad\"\n}\n",
        )
        .unwrap();

        let error = sync_repo(&root, true).unwrap_err().to_string();
        assert!(error.contains("sync check failed"));
        sync_repo(&root, false).unwrap();
        assert!(sync_repo(&root, true).is_ok());
        assert!(validate_repo(&root).unwrap().ok);
    }

    // Human-readable output groups issues under their file heading and prints
    // summarized expected/actual values.
    #[test]
    fn human_issue_output_groups_by_file() {
        let rendered = format_human_issues(
            "validation failed:",
            &[
                issue(
                    "README.md",
                    "readme.identity",
                    "owned metadata differs",
                    Some("expected title".to_string()),
                    Some("actual title".to_string()),
                ),
                issue(
                    "wrapper-js/package.json",
                    "name",
                    "owned metadata differs",
                    Some("expected-name".to_string()),
                    Some("actual-name".to_string()),
                ),
            ],
        );

        assert!(rendered.contains("validation failed:"));
        assert!(rendered.contains("- README.md"));
        assert!(rendered.contains("- wrapper-js/package.json"));
        assert!(rendered.contains("expected: \"expected title\""));
        assert!(rendered.contains("actual: \"actual-name\""));
    }

    // The CLI grammar accepts `validate --json`.
    #[test]
    fn parses_validate_subcommand() {
        let cli = Cli::try_parse_from(["ossplate", "validate", "--json"]).unwrap();
        match cli.command {
            Commands::Validate { json, .. } => assert!(json),
            _ => panic!("expected validate"),
        }
    }

    // `create` accepts identity override flags alongside the target path.
    #[test]
    fn parses_create_with_identity_overrides() {
        let cli = Cli::try_parse_from([
            "ossplate",
            "create",
            "demo",
            "--name",
            "Demo Tool",
            "--command",
            "demo-tool",
        ])
        .unwrap();
        match cli.command {
            Commands::Create { target, overrides } => {
                assert_eq!(target, PathBuf::from("demo"));
                assert_eq!(overrides.name.as_deref(), Some("Demo Tool"));
                assert_eq!(overrides.command.as_deref(), Some("demo-tool"));
            }
            _ => panic!("expected create"),
        }
    }

    // `create` copies the scaffold into a fresh directory that then validates.
    // NOTE(review): uses a fixed temp-dir name (unlike make_fixture_root), so
    // concurrent runs could collide — confirm test isolation.
    #[test]
    fn create_scaffolds_a_target_directory() {
        let source_root = make_fixture_root();
        let target = std::env::temp_dir().join("ossplate-create-target");
        if target.exists() {
            fs::remove_dir_all(&target).unwrap();
        }

        create_scaffold_from(&source_root, &target, &IdentityOverrides::default()).unwrap();
        assert!(target.join("ossplate.toml").exists());
        assert!(target.join("core-rs/Cargo.toml").exists());
        assert!(validate_repo(&target).unwrap().ok);

        fs::remove_dir_all(&target).unwrap();
    }

    // `init` fills in missing scaffold files in a pre-existing directory and
    // brings drifted files (here a deliberately wrong Cargo.toml) back into
    // compliance.
    #[test]
    fn init_hydrates_an_existing_directory() {
        let source_root = make_fixture_root();
        let target = std::env::temp_dir().join("ossplate-init-target");
        if target.exists() {
            fs::remove_dir_all(&target).unwrap();
        }
        fs::create_dir_all(target.join("core-rs")).unwrap();
        fs::write(
            target.join("ossplate.toml"),
            fs::read_to_string(source_root.join("ossplate.toml")).unwrap(),
        )
        .unwrap();
        // Seed a wrong Cargo.toml that init must correct.
        fs::write(
            target.join("core-rs/Cargo.toml"),
            r#"[package]
name = "bad-core"
version = "0.1.0"
"#,
        )
        .unwrap();

        init_scaffold_from(&source_root, &target, &IdentityOverrides::default()).unwrap();
        assert!(target.join("wrapper-js/package.json").exists());
        assert!(target.join("wrapper-py/pyproject.toml").exists());
        assert!(validate_repo(&target).unwrap().ok);

        fs::remove_dir_all(&target).unwrap();
    }

    // Overrides supplied at create time must land in the generated config and
    // the result must validate against the new identity.
    #[test]
    fn create_applies_identity_overrides_before_sync() {
        let source_root = make_fixture_root();
        let target = std::env::temp_dir().join("ossplate-create-with-overrides");
        if target.exists() {
            fs::remove_dir_all(&target).unwrap();
        }

        create_scaffold_from(
            &source_root,
            &target,
            &IdentityOverrides {
                name: Some("Demo Tool".to_string()),
                description: Some("A demo scaffold".to_string()),
                repository: Some("https://example.com/demo".to_string()),
                license: Some("Apache-2.0".to_string()),
                author_name: Some("Demo Dev".to_string()),
                author_email: Some("demo@example.com".to_string()),
                rust_crate: Some("demo-core".to_string()),
                npm_package: Some("demo-wrapper-js".to_string()),
                python_package: Some("demo-wrapper-py".to_string()),
                command: Some("demo-tool".to_string()),
            },
        )
        .unwrap();

        let config = load_config(&target).unwrap();
        assert_eq!(config.project.name, "Demo Tool");
        assert_eq!(config.packages.command, "demo-tool");
        assert!(validate_repo(&target).unwrap().ok);

        fs::remove_dir_all(&target).unwrap();
    }

    // Sync must restore only the owned identity section of README.md while
    // preserving the unowned remainder of the document.
    #[test]
    fn sync_preserves_unowned_root_readme_content() {
        let root = make_fixture_root();
        let original = fs::read_to_string(root.join("README.md")).unwrap();
        fs::write(
            root.join("README.md"),
            original.replace(
                "A practical baseline for shipping one project",
                "Changed identity text",
            ),
        )
        .unwrap();

        sync_repo(&root, false).unwrap();
        let synced = fs::read_to_string(root.join("README.md")).unwrap();
        assert!(synced.contains("## What This Tool Gives You"));
        assert!(synced.contains("A practical baseline for shipping one project"));
    }

    // A source tree containing only ossplate.toml is rejected as a scaffold
    // source.
    #[test]
    fn create_fails_when_scaffold_source_is_incomplete() {
        let source_root = std::env::temp_dir().join("ossplate-incomplete-source");
        if source_root.exists() {
            fs::remove_dir_all(&source_root).unwrap();
        }
        fs::create_dir_all(&source_root).unwrap();
        fs::write(
            source_root.join("ossplate.toml"),
            fs::read_to_string(make_fixture_root().join("ossplate.toml")).unwrap(),
        )
        .unwrap();

        let error = ensure_scaffold_source_root(&source_root).unwrap_err();
        assert!(error
            .to_string()
            .contains("require a full scaffold source checkout"));

        fs::remove_dir_all(&source_root).unwrap();
    }

    // OSSPLATE_TEMPLATE_ROOT overrides template-root discovery.
    // NOTE(review): mutating process env is racy if tests run multi-threaded;
    // confirm no other test reads this variable concurrently.
    #[test]
    fn discover_template_root_honors_env_override() {
        let source_root = make_fixture_root();
        unsafe {
            std::env::set_var("OSSPLATE_TEMPLATE_ROOT", &source_root);
        }
        let discovered = discover_template_root().unwrap();
        unsafe {
            std::env::remove_var("OSSPLATE_TEMPLATE_ROOT");
        }
        assert_eq!(discovered, source_root);
    }

    // Builds a complete, internally consistent fixture repository in a
    // uniquely named temp directory and returns its path.
    fn make_fixture_root() -> PathBuf {
        // Nanosecond timestamp keeps parallel test runs from sharing a root.
        let unique = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_nanos();
        let root = std::env::temp_dir().join(format!("ossplate-fixture-{unique}"));
        fs::create_dir_all(root.join(".github/workflows")).unwrap();
        fs::create_dir_all(root.join("core-rs")).unwrap();
        fs::create_dir_all(root.join("wrapper-js")).unwrap();
        fs::create_dir_all(root.join("wrapper-py")).unwrap();
        let config = r#"[project]
name = "Ossplate"
slug = "ossplate"
description = "A practical baseline for shipping one project across Cargo, npm, and PyPI without starting from scratch every time."
repository = "https://github.com/stefdevscore/ossplate"
license = "Unlicense"

[author]
name = "Stef"
email = "stefdevscore@github.com"

[packages]
rust_crate = "ossplate"
npm_package = "ossplate"
python_package = "ossplate"
command = "ossplate"
"#;
        // The slug line is stripped before writing — presumably slug is
        // optional or derived in ToolConfig; confirm against load_config.
        fs::write(
            root.join("ossplate.toml"),
            config.replace("slug = \"ossplate\"\n", ""),
        )
        .unwrap();
        fs::write(
            root.join("core-rs/Cargo.toml"),
            r#"[package]
name = "ossplate"
version = "0.1.0"
edition = "2021"
authors = ["Stef <stefdevscore@github.com>"]
description = "A practical baseline for shipping one project across Cargo, npm, and PyPI without starting from scratch every time."
license = "Unlicense"
readme = "../README.md"
repository = "https://github.com/stefdevscore/ossplate"
homepage = "https://github.com/stefdevscore/ossplate"
"#,
        )
        .unwrap();
        fs::write(
            root.join("wrapper-js/package.json"),
            "{\n \"name\": \"ossplate\",\n \"description\": \"A practical baseline for shipping one project across Cargo, npm, and PyPI without starting from scratch every time.\",\n \"bin\": { \"ossplate\": \"bin/ossplate.js\" },\n \"author\": \"Stef <stefdevscore@github.com>\",\n \"license\": \"Unlicense\",\n \"repository\": { \"url\": \"https://github.com/stefdevscore/ossplate\" }\n}\n",
        )
        .unwrap();
        fs::write(
            root.join("wrapper-py/pyproject.toml"),
            r#"[project]
name = "ossplate"
description = "A practical baseline for shipping one project across Cargo, npm, and PyPI without starting from scratch every time."
license = { text = "Unlicense" }
authors = [
{ name = "Stef", email = "stefdevscore@github.com" }
]

[project.urls]
Homepage = "https://github.com/stefdevscore/ossplate"
Repository = "https://github.com/stefdevscore/ossplate"

[project.scripts]
ossplate = "ossplate.cli:main"
"#,
        )
        .unwrap();
        // Workflow fixtures only need the marked name section; validators
        // never parse them as YAML.
        fs::write(
            root.join(".github/workflows/ci.yml"),
            format!(
                "{start}\nname: Ossplate CI\n{end}\n\non:\n push:\n branches:\n - main\n",
                start = WORKFLOW_NAME_START,
                end = WORKFLOW_NAME_END
            ),
        )
        .unwrap();
        fs::write(
            root.join(".github/workflows/publish.yml"),
            format!(
                "{start}\nname: Ossplate publishing\n{end}\n\non:\n workflow_dispatch:\n",
                start = WORKFLOW_NAME_START,
                end = WORKFLOW_NAME_END
            ),
        )
        .unwrap();
        fs::write(
            root.join(".github/workflows/publish-npm.yml"),
            format!(
                "{start}\nname: Ossplate publish-npm\n{end}\n\non:\n workflow_dispatch:\n",
                start = WORKFLOW_NAME_START,
                end = WORKFLOW_NAME_END
            ),
        )
        .unwrap();
        // Root README: owned identity section between markers plus unowned
        // trailing content that sync must preserve.
        fs::write(
            root.join("README.md"),
            format!(
                "{start}\n{body}{end}\n\n## What This Tool Gives You\n\n- a canonical Rust CLI in [`core-rs/`](./core-rs)\n",
                start = README_IDENTITY_START,
                body = render_root_readme_identity(&load_config(&root).unwrap()),
                end = README_IDENTITY_END
            ),
        )
        .unwrap();
        fs::write(
            root.join("wrapper-js/README.md"),
            render_wrapper_readme("JavaScript", &load_config(&root).unwrap()),
        )
        .unwrap();
        fs::write(
            root.join("wrapper-py/README.md"),
            render_wrapper_readme("Python", &load_config(&root).unwrap()),
        )
        .unwrap();
        root
    }
}
|