@controlfront/detect 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. package/bin/cfb.js +202 -0
  2. package/package.json +64 -0
  3. package/src/commands/baseline.js +198 -0
  4. package/src/commands/init.js +309 -0
  5. package/src/commands/login.js +71 -0
  6. package/src/commands/logout.js +44 -0
  7. package/src/commands/scan.js +1547 -0
  8. package/src/commands/snapshot.js +191 -0
  9. package/src/commands/sync.js +127 -0
  10. package/src/config/baseUrl.js +49 -0
  11. package/src/data/tailwind-core-spec.js +149 -0
  12. package/src/engine/runRules.js +210 -0
  13. package/src/lib/collectDeclaredTokensAuto.js +67 -0
  14. package/src/lib/collectTokenMatches.js +330 -0
  15. package/src/lib/collectTokenMatches.js.regex +252 -0
  16. package/src/lib/loadRules.js +73 -0
  17. package/src/rules/core/no-hardcoded-colors.js +28 -0
  18. package/src/rules/core/no-hardcoded-spacing.js +29 -0
  19. package/src/rules/core/no-inline-styles.js +28 -0
  20. package/src/utils/authorId.js +106 -0
  21. package/src/utils/buildAIContributions.js +224 -0
  22. package/src/utils/buildBlameData.js +388 -0
  23. package/src/utils/buildDeclaredCssVars.js +185 -0
  24. package/src/utils/buildDeclaredJson.js +214 -0
  25. package/src/utils/buildFileChanges.js +372 -0
  26. package/src/utils/buildRuntimeUsage.js +337 -0
  27. package/src/utils/detectDeclaredDrift.js +59 -0
  28. package/src/utils/extractImports.js +178 -0
  29. package/src/utils/fileExtensions.js +65 -0
  30. package/src/utils/generateInsights.js +332 -0
  31. package/src/utils/getAllFiles.js +63 -0
  32. package/src/utils/getCommitMetaData.js +102 -0
  33. package/src/utils/getLine.js +14 -0
  34. package/src/utils/resolveProjectForFolder/index.js +47 -0
  35. package/src/utils/twClassify.js +138 -0
@@ -0,0 +1,224 @@
1
+ import fs from "fs";
2
+ import path from "path";
3
+ import { parse } from "@babel/parser";
4
+ import traverseModule from "@babel/traverse";
5
+ import { analyzeStructural } from "./signals/structural.js";
6
+ const traverse = traverseModule.default;
7
+
8
// -------------- Provenance parser --------------
/**
 * Scan file content for @cf:ai provenance markers, accepting the optional
 * comment wrappers //, slash-star, and JSX brace-slash-star.
 * Recognizes:
 *   - a standalone "@cf:ai" tag                -> { type: "single", line }
 *   - an "@cf:ai:start" / "@cf:ai:end" pair    -> { type: "block", start, end }
 * Line numbers are 1-based. An ":end" with no open ":start" is ignored,
 * and a ":start" that is never closed is dropped.
 */
function detectProvenanceTags(content) {
  const SINGLE = /(?:\/\/|\/\*|\{\s*\/\*)?\s*@cf:ai\b(?!:start|:end)/;
  const START = /(?:\/\/|\/\*|\{\s*\/\*)?\s*@cf:ai:start/;
  const END = /(?:\/\/|\/\*|\{\s*\/\*)?\s*@cf:ai:end/;

  const found = [];
  let openBlock = null;
  let lineNo = 0;

  for (const line of content.split("\n")) {
    lineNo += 1;
    if (SINGLE.test(line)) {
      found.push({ type: "single", line: lineNo });
    } else if (START.test(line)) {
      openBlock = { start: lineNo };
    } else if (END.test(line) && openBlock) {
      found.push({ type: "block", start: openBlock.start, end: lineNo });
      openBlock = null;
    }
  }
  return found;
}
32
+
33
// -------------- Basic heuristic detector --------------
/**
 * Flag lines that look machine-generated based on common marker phrases
 * ("Generated by ChatGPT", "Generated with Copilot", "Auto-generated").
 * Returns [{ type: "heuristic", line }] with 1-based line numbers.
 */
function inferAIContent(content) {
  const markers = [
    /Generated\s+by\s+ChatGPT/i,
    /Generated\s+with\s+Copilot/i,
    /Auto[-\s]?generated/i
  ];

  const hits = [];
  content.split("\n").forEach((text, idx) => {
    const matched = markers.some((marker) => marker.test(text));
    if (matched) {
      hits.push({ type: "heuristic", line: idx + 1 });
    }
  });
  return hits;
}
50
+
51
// -------------- AST parser --------------
/**
 * Parse JS/TS source into a Babel AST with a permissive plugin set.
 * Returns null on any parse failure so callers can skip unparseable files.
 * The `file` argument is accepted for call-site symmetry but is unused here.
 */
function parseAST(content, file) {
  const PARSER_PLUGINS = [
    "jsx",
    "typescript",
    "classProperties",
    "objectRestSpread",
    "optionalChaining",
    "nullishCoalescingOperator",
    "decorators-legacy",
    "dynamicImport",
    "exportDefaultFrom",
    "exportNamespaceFrom"
  ];

  try {
    return parse(content, { sourceType: "module", plugins: PARSER_PLUGINS });
  } catch {
    return null;
  }
}
73
+
74
// Merge overlapping or adjacent ranges
/**
 * Collapse overlapping or adjacent {start, end, provenance} ranges.
 * When merged ranges disagree on provenance, the provenance field becomes
 * an array of the distinct values. NOTE: merges happen in place, so input
 * objects may be mutated and returned by reference (matches original).
 */
function mergeRanges(ranges) {
  if (!ranges.length) return [];

  const ordered = [...ranges].sort((a, b) => a.start - b.start);
  const out = [];

  for (const range of ordered) {
    const prev = out[out.length - 1];

    if (prev && range.start <= prev.end + 1) {
      // Overlapping or touching: extend the previous range.
      prev.end = Math.max(prev.end, range.end);

      // Combine differing provenance values into a de-duplicated array.
      if (prev.provenance !== range.provenance) {
        if (Array.isArray(prev.provenance)) {
          if (!prev.provenance.includes(range.provenance)) {
            prev.provenance.push(range.provenance);
          }
        } else {
          prev.provenance = [prev.provenance, range.provenance];
        }
      }
    } else {
      out.push(range);
    }
  }

  return out;
}
98
+
99
// Safety function to ensure numeric stats
/** Return `value` when it is a finite, non-negative number; otherwise 0. */
function safeNumber(value) {
  if (typeof value !== "number") return 0;
  if (!Number.isFinite(value)) return 0;
  return value >= 0 ? value : 0;
}
103
+
104
// -------------- Main entry --------------
/**
 * Build per-file AI-contribution data for a project.
 *
 * For each candidate source file (.js/.jsx/.ts/.tsx, <= 1MB) it:
 *   1. collects declared provenance tags (@cf:ai markers),
 *   2. collects heuristic "looks AI-generated" line matches,
 *   3. walks the Babel AST and records nodes overlapping provenance ranges,
 *   4. gathers structural signals via analyzeStructural (best effort).
 *
 * @param {Object} opts
 * @param {string} opts.projectRoot - base dir used to relativize file paths
 * @param {string[]} opts.files - absolute file paths to inspect
 * @returns {Promise<{stats: Object, results: Array}>} deep-cloned, JSON-safe
 *
 * Fix vs. previous version: the empty-input early return used to yield
 * `stats: {}` while the normal path returned a fully populated stats object;
 * both paths now return the same shape (zeroed stats).
 */
export async function buildAIContributions({ projectRoot, files }) {
  const results = [];
  let total_nodes_with_provenance = 0;

  for (const fullPath of files ?? []) {
    // File type guard: only process .js, .jsx, .ts, .tsx files
    if (!/\.(js|jsx|ts|tsx)$/i.test(fullPath)) {
      continue;
    }

    let content;
    try {
      const fileStat = await fs.promises.stat(fullPath);
      // File size limit guard: skip files larger than 1MB (1048576 bytes)
      if (fileStat.size > 1048576) {
        continue;
      }
      content = await fs.promises.readFile(fullPath, "utf8");
    } catch {
      // Unreadable file (permissions, deleted mid-scan, ...): skip silently.
      continue;
    }

    const declared = detectProvenanceTags(content);
    const inferred = inferAIContent(content);

    const node_results = [];
    const ast = parseAST(content, fullPath);
    if (ast) {
      // Normalize declared tags and inferred lines into uniform line ranges.
      const provenanceLines = [];
      for (const d of declared) {
        if (d.type === "single") {
          provenanceLines.push({ start: d.line, end: d.line, provenance: "declared" });
        } else if (d.type === "block") {
          provenanceLines.push({ start: d.start, end: d.end, provenance: "declared" });
        }
      }
      for (const i of inferred) {
        if (i.line) {
          provenanceLines.push({ start: i.line, end: i.line, provenance: "inferred" });
        }
      }

      // Merge overlapping or adjacent provenance ranges to avoid double counting
      const mergedProvenance = mergeRanges(provenanceLines);

      traverse(ast, {
        enter(path) {
          const node = path.node;
          if (!node.loc) return;
          const nodeStart = node.loc.start.line;
          const nodeEnd = node.loc.end.line;

          for (const prov of mergedProvenance) {
            // Record the node once if its line span overlaps any provenance range.
            if (nodeStart <= prov.end && nodeEnd >= prov.start) {
              node_results.push({
                node_type: node.type,
                start_line: nodeStart,
                end_line: nodeEnd,
                provenance: prov.provenance
              });
              break; // Avoid adding multiple times for same node
            }
          }
        }
      });
    }

    // Structural signals are best-effort; analyzer failures degrade to {}.
    let structuralSignals = {};
    try {
      structuralSignals = await analyzeStructural(content);
    } catch {
      structuralSignals = {};
    }

    if (declared.length || inferred.length || node_results.length) {
      results.push({
        file: path.relative(projectRoot, fullPath),
        declared_count: declared.length,
        inferred_count: inferred.length,
        segments: [...declared, ...inferred],
        node_results,
        signals: { structural: structuralSignals }
      });
      total_nodes_with_provenance += node_results.length;
    }
  }

  // Average one structural metric over all result files. With zero results
  // the 0/0 division yields NaN, which safeNumber maps to 0.
  const avgStructural = (key) =>
    safeNumber(
      results.reduce((a, r) => a + (r.signals?.structural?.[key] || 0), 0) / results.length
    );

  const stats = {
    files_with_ai: safeNumber(results.length),
    declared_total: safeNumber(results.reduce((a, r) => a + r.declared_count, 0)),
    inferred_total: safeNumber(results.reduce((a, r) => a + r.inferred_count, 0)),
    nodes_with_provenance_total: safeNumber(total_nodes_with_provenance),

    structural: {
      avg_comment_density: avgStructural("comment_density"),
      avg_jsx_density: avgStructural("jsx_density"),
      avg_identifier_entropy: avgStructural("identifier_entropy")
    }
  };

  // Deep clone so callers get a plain JSON-safe structure with no shared refs.
  return JSON.parse(JSON.stringify({ stats, results }));
}
@@ -0,0 +1,388 @@
1
+ import { execFileSync } from "node:child_process";
2
+ import crypto from "node:crypto";
3
+
4
/**
 * Steps 1–2 (diff + scoped blame): for a commit, collect per-file unified-0
 * diffs and the changed +line ranges, then run `git blame` against the parent
 * commit ONLY for those ranges.
 *
 * If `filePaths` is provided, ONLY those files are processed (one scoped diff
 * each). If `parentSha` is omitted it is derived (single-parent only); merge
 * commits are skipped.
 *
 * Output shape (stable for Step 1 consumers):
 *   - files[path].diff_unified0
 *   - files[path].ranges           (new-file line ranges)
 *   - files[path].parent_ranges    (parent-file line ranges)
 *   - files[path].blame_stats      (Step 2 aggregates)
 *
 * Author identity uses ONLY email hashed via sha256(emailLowerTrim).
 *
 * Fixes vs. previous version:
 *   - the totals/coverage aggregation was duplicated verbatim in both
 *     branches; it now lives in one helper (summarizeBlameFiles).
 *   - the whole-commit fallback parser used to create a bogus "/dev/null"
 *     entry for deleted files (from the "+++ /dev/null" header); deletions
 *     are now skipped.
 */
export function buildBlameData({
  repoRoot,
  commitSha,
  parentSha = null,
  filePaths = null,
  commitTimestampIso = null,
}) {
  const parent = parentSha || deriveSingleParentSha(repoRoot, commitSha);
  if (!parent) {
    return { status: "skipped", reason: "missing_or_non_single_parent", files: {} };
  }

  // Preferred path: changed files are known -> one scoped diff per file.
  if (Array.isArray(filePaths) && filePaths.length > 0) {
    const files = {};

    for (const filePath of filePaths) {
      if (!filePath || typeof filePath !== "string") continue;

      let diffText = "";
      try {
        diffText = execGit(repoRoot, [
          "diff",
          "--unified=0",
          "--no-color",
          parent,
          commitSha,
          "--",
          filePath,
        ]);
      } catch (_e) {
        // Treat per-file failures as simply missing diff for that file.
        continue;
      }

      // If no diff output, nothing to store.
      if (!diffText || diffText.trim().length === 0) continue;

      const { new_ranges, old_ranges } = parseHunkRangesFromUnified0Diff(diffText);
      // Blame runs against the parent, so parent-side ranges are the ones to blame.
      const blame_stats = buildFileBlameStats({
        repoRoot,
        parent,
        filePath,
        ranges: old_ranges,
        commitTimestampIso,
      });

      files[filePath] = {
        diff_unified0: diffText,
        ranges: new_ranges,
        parent_ranges: old_ranges,
        blame_stats,
      };
    }

    return summarizeBlameFiles(files);
  }

  // Fallback: one diff for the whole commit (older behaviour).
  let diffText;
  try {
    diffText = execGit(repoRoot, [
      "diff",
      "--unified=0",
      "--no-color",
      parent,
      commitSha,
      "--",
    ]);
  } catch (_e) {
    return { status: "error", reason: "git_diff_failed", files: {} };
  }

  const files = {};
  let currentFile = null;

  for (const line of diffText.split("\n")) {
    if (line.startsWith("+++ ")) {
      const p = line.slice(4).trim();
      // "+++ /dev/null" marks a deleted file: nothing exists in the new tree,
      // so do not create an entry keyed "/dev/null".
      if (p === "/dev/null") {
        currentFile = null;
        continue;
      }
      currentFile = p.startsWith("b/") ? p.slice(2) : p;
      files[currentFile] = { diff_unified0: "", ranges: [], parent_ranges: [], blame_stats: null };
      continue;
    }

    if (!currentFile) continue;

    files[currentFile].diff_unified0 += line + "\n";

    if (line.startsWith("@@")) {
      const { old_ranges, new_ranges } = parseHunkRangesFromUnified0Diff(line);
      for (const r of new_ranges) files[currentFile].ranges.push(r);
      for (const r of old_ranges) files[currentFile].parent_ranges.push(r);
    }
  }

  for (const [filePath, f] of Object.entries(files)) {
    const newRanges = Array.isArray(f.ranges) ? f.ranges : [];
    const oldRanges = Array.isArray(f.parent_ranges) ? f.parent_ranges : [];
    // Prefer parent-side ranges (correct for blaming `parent`); fall back to
    // new-side ranges only when no old-side ranges exist.
    const blameRanges = oldRanges.length ? oldRanges : newRanges;

    f.blame_stats = buildFileBlameStats({
      repoRoot,
      parent,
      filePath,
      ranges: blameRanges,
      commitTimestampIso,
    });
  }

  return summarizeBlameFiles(files);
}

// Aggregate per-file blame stats into overall line coverage; shared by both
// the per-file and whole-commit paths of buildBlameData.
function summarizeBlameFiles(files) {
  let lines_changed = 0;
  let lines_attributed = 0;

  for (const f of Object.values(files)) {
    const bs = f && f.blame_stats ? f.blame_stats : null;
    if (!bs) continue;
    lines_changed += bs.lines_changed;
    lines_attributed += bs.lines_attributed;
  }

  const line_coverage_pct = lines_changed
    ? round1((lines_attributed / lines_changed) * 100)
    : 0;

  return { status: "ok", line_coverage_pct, files };
}
161
+
162
/**
 * Run `git <args>` in `cwd` and return stdout as UTF-8 text.
 * Throws (from execFileSync) on non-zero exit; stdin is ignored and
 * stderr is captured rather than inherited.
 */
function execGit(cwd, args) {
  const options = {
    cwd,
    encoding: "utf8",
    stdio: ["ignore", "pipe", "pipe"],
  };
  return execFileSync("git", args, options);
}
169
+
170
/**
 * Resolve the single parent SHA of `commitSha`, or null when it cannot be
 * determined: missing inputs, root commits (no parent), merge commits
 * (2+ parents — skipped in v1), or any git failure.
 */
function deriveSingleParentSha(repoRoot, commitSha) {
  if (!repoRoot || !commitSha) return null;

  try {
    const raw = execGit(repoRoot, ["show", "-s", "--pretty=%P", commitSha]);
    const parentShas = raw.trim().split(/\s+/).filter(Boolean);
    // Exactly one parent -> usable; zero (root) or 2+ (merge) -> null.
    return parentShas.length === 1 ? parentShas[0] : null;
  } catch (_e) {
    return null;
  }
}
185
+
186
/**
 * From a unified-0 diff text, parse BOTH hunk sides:
 *   @@ -oldStart,oldCount +newStart,newCount @@
 *
 * - `old_ranges` refer to line ranges in the PARENT file (correct for blaming `parent`).
 * - `new_ranges` refer to line ranges in the NEW file (useful for highlighting new-line spans).
 */
function parseHunkRangesFromUnified0Diff(diffText) {
  const old_ranges = [];
  const new_ranges = [];

  // Convert a "start[,count]" regex match into an inclusive range, or null
  // when count is 0 (pure addition/deletion has no lines on that side).
  const toRange = (m) => {
    if (!m) return null;
    const start = Number(m[1]);
    const count = m[2] ? Number(m[2]) : 1;
    return count > 0 ? { start, end: start + count - 1 } : null;
  };

  for (const line of diffText.split("\n")) {
    if (!line.startsWith("@@")) continue;

    // Example: @@ -142,6 +142,12 @@
    const oldSide = toRange(line.match(/-([0-9]+)(?:,([0-9]+))?/));
    const newSide = toRange(line.match(/\+([0-9]+)(?:,([0-9]+))?/));

    if (oldSide) old_ranges.push(oldSide);
    if (newSide) new_ranges.push(newSide);
  }

  return { old_ranges, new_ranges };
}
221
+
222
/**
 * For one file, blame the given PARENT-side line ranges against `parent` and
 * aggregate: attribution coverage, previous-author distribution (hashed ids),
 * author entropy, and line-age percentiles.
 *
 * @param {Object} p
 * @param {string} p.repoRoot - git working directory
 * @param {string} p.parent - parent commit SHA to blame against
 * @param {string} p.filePath - path relative to the repo root
 * @param {Array<{start:number,end:number}>} p.ranges - parent-side line ranges
 * @param {?string} p.commitTimestampIso - commit time; line ages are measured
 *   back from this instant (falls back to "now" when absent or unparseable)
 *
 * Fix vs. previous version: an unparseable timestamp made Date.parse return
 * NaN, which poisoned every line age (NaN median/p90); it now falls back to
 * Date.now() like the missing-timestamp case.
 */
function buildFileBlameStats({ repoRoot, parent, filePath, ranges, commitTimestampIso }) {
  const parsedMs = commitTimestampIso ? Date.parse(commitTimestampIso) : NaN;
  const commitMs = Number.isFinite(parsedMs) ? parsedMs : Date.now();

  let lines_changed = 0;
  let lines_attributed = 0;

  const prevAuthorCounts = new Map(); // author_id -> lines
  const lineAgesDays = [];

  // Merge first so overlapping hunks are neither double-blamed nor double-counted.
  const merged = mergeRanges(Array.isArray(ranges) ? ranges : []);

  for (const r of merged) {
    const start = r.start;
    const end = r.end;
    if (!Number.isFinite(start) || !Number.isFinite(end) || end < start) continue;

    lines_changed += end - start + 1;

    let blameText;
    try {
      blameText = execGit(repoRoot, [
        "blame",
        "--line-porcelain",
        "-L",
        `${start},${end}`,
        parent,
        "--",
        filePath,
      ]);
    } catch (_e) {
      // Common for newly added files (not present in parent), renames, binaries, etc.
      // Keep going; coverage will reflect missing attribution.
      continue;
    }

    for (const b of parseBlamePorcelain(blameText)) {
      if (!b.authorEmail) continue;

      const author_id = authorIdFromEmail(b.authorEmail);
      if (!author_id) continue;

      lines_attributed += 1;
      prevAuthorCounts.set(author_id, (prevAuthorCounts.get(author_id) || 0) + 1);

      if (Number.isFinite(b.authorTimeSec)) {
        // Clamp to 0: clock skew can place a blamed line "after" the commit.
        const ageDays = Math.max(0, (commitMs - b.authorTimeSec * 1000) / 86400000);
        lineAgesDays.push(ageDays);
      }
    }
  }

  const coverage_pct = lines_changed ? round1((lines_attributed / lines_changed) * 100) : 0;

  // Top-10 previous authors by blamed-line count.
  const top = [...prevAuthorCounts.entries()]
    .sort((a, b) => b[1] - a[1])
    .slice(0, 10)
    .map(([author_id, lines]) => ({
      author_id,
      lines,
      pct: lines_attributed ? round1((lines / lines_attributed) * 100) : 0,
    }));

  let median = null;
  let p90 = null;
  if (lineAgesDays.length > 0) {
    lineAgesDays.sort((a, b) => a - b);
    median = round1(quantileSorted(lineAgesDays, 0.5));
    p90 = round1(quantileSorted(lineAgesDays, 0.9));
  }

  return {
    lines_changed,
    lines_attributed,
    coverage_pct,
    prev_authors: {
      unique_count: prevAuthorCounts.size,
      top,
    },
    prev_author_entropy: round3(entropyFromCounts(prevAuthorCounts)),
    line_age_days: {
      median,
      p90,
    },
  };
}
309
+
310
/**
 * Stable pseudonymous author id: sha256 hex digest of the trimmed,
 * lower-cased email. Returns null for empty or non-string input.
 */
function authorIdFromEmail(email) {
  if (typeof email !== "string" || email === "") return null;
  const normalized = email.trim().toLowerCase();
  return crypto.createHash("sha256").update(normalized, "utf8").digest("hex");
}
317
+
318
/**
 * Parse `git blame --line-porcelain` output into
 * [{ authorEmail, authorTimeSec }] — one entry per blamed line.
 *
 * Fix vs. previous version: porcelain header lines have the form
 * "<sha> <orig-line> <final-line> [<group-size>]", and the trailing
 * group-size appears only on the FIRST line of each blame group. The old
 * pattern required a fourth number, so continuation headers (3 fields)
 * were never recognized: those lines were dropped from the output and
 * their author-mail/author-time fields overwrote the previous record.
 * The group-size is now optional.
 */
function parseBlamePorcelain(text) {
  const out = [];
  let current = null;

  // "<sha> <orig> <final>" with an optional trailing group-size.
  const headerRegex = /^[0-9a-f]{8,40}\s+\d+\s+\d+(?:\s+\d+)?\s*$/;

  for (const raw of text.split("\n")) {
    const line = raw.trimEnd();

    if (headerRegex.test(line)) {
      if (current) out.push(current);
      current = { authorEmail: "", authorTimeSec: null };
      continue;
    }

    if (!current) continue;

    if (line.startsWith("author-mail ")) {
      // Emails are wrapped in angle brackets: "author-mail <a@b.c>".
      current.authorEmail = line
        .slice("author-mail ".length)
        .replace(/[<>]/g, "")
        .trim();
    } else if (line.startsWith("author-time ")) {
      const v = Number(line.slice("author-time ".length).trim());
      current.authorTimeSec = Number.isFinite(v) ? v : null;
    }
  }

  if (current) out.push(current);
  return out;
}
348
+
349
/**
 * Collapse overlapping or adjacent {start, end} ranges into a minimal,
 * sorted list of disjoint copies. Returns [] for non-array or empty input.
 */
function mergeRanges(ranges) {
  if (!Array.isArray(ranges) || ranges.length === 0) return [];

  const ordered = ranges.slice().sort((a, b) => a.start - b.start);
  const out = [];

  for (const { start, end } of ordered) {
    const tail = out[out.length - 1];
    if (tail && start <= tail.end + 1) {
      // Overlapping or touching: extend the last merged range.
      tail.end = Math.max(tail.end, end);
    } else {
      out.push({ start, end });
    }
  }
  return out;
}
360
+
361
/**
 * Shannon entropy (in bits) of the count distribution held in `map`
 * (values are line counts per author). Returns 0 when the total is 0.
 */
function entropyFromCounts(map) {
  let total = 0;
  for (const count of map.values()) total += count;
  if (!total) return 0;

  let entropy = 0;
  for (const count of map.values()) {
    const p = count / total;
    entropy -= p * Math.log2(p);
  }
  return entropy;
}
372
+
373
/**
 * Linear-interpolated quantile of an ascending-sorted numeric array.
 * Returns null for an empty array; `q` is in [0, 1].
 */
function quantileSorted(sorted, q) {
  if (!sorted.length) return null;
  const position = (sorted.length - 1) * q;
  const lowerIdx = Math.floor(position);
  const fraction = position - lowerIdx;
  const lower = sorted[lowerIdx];
  const upper = sorted[lowerIdx + 1];
  // At the top end there is no upper neighbour to interpolate towards.
  if (upper === undefined) return lower;
  return lower + fraction * (upper - lower);
}
381
+
382
/** Round a number to one decimal place. */
function round1(n) {
  const scaled = n * 10;
  return Math.round(scaled) / 10;
}
385
+
386
/** Round a number to three decimal places. */
function round3(n) {
  const scaled = n * 1000;
  return Math.round(scaled) / 1000;
}