careerclaw-js 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +362 -0
- package/README.md +348 -0
- package/SECURITY.md +156 -0
- package/SKILL.md +463 -0
- package/dist/adapters/hackernews.d.ts +36 -0
- package/dist/adapters/hackernews.d.ts.map +1 -0
- package/dist/adapters/hackernews.js +164 -0
- package/dist/adapters/hackernews.js.map +1 -0
- package/dist/adapters/index.d.ts +10 -0
- package/dist/adapters/index.d.ts.map +1 -0
- package/dist/adapters/index.js +9 -0
- package/dist/adapters/index.js.map +1 -0
- package/dist/adapters/remoteok.d.ts +35 -0
- package/dist/adapters/remoteok.d.ts.map +1 -0
- package/dist/adapters/remoteok.js +212 -0
- package/dist/adapters/remoteok.js.map +1 -0
- package/dist/briefing.d.ts +81 -0
- package/dist/briefing.d.ts.map +1 -0
- package/dist/briefing.js +152 -0
- package/dist/briefing.js.map +1 -0
- package/dist/cli.d.ts +22 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +235 -0
- package/dist/cli.js.map +1 -0
- package/dist/config.d.ts +91 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +126 -0
- package/dist/config.js.map +1 -0
- package/dist/core/text-processing.d.ts +62 -0
- package/dist/core/text-processing.d.ts.map +1 -0
- package/dist/core/text-processing.js +187 -0
- package/dist/core/text-processing.js.map +1 -0
- package/dist/drafting.d.ts +28 -0
- package/dist/drafting.d.ts.map +1 -0
- package/dist/drafting.js +116 -0
- package/dist/drafting.js.map +1 -0
- package/dist/gap.d.ts +27 -0
- package/dist/gap.d.ts.map +1 -0
- package/dist/gap.js +90 -0
- package/dist/gap.js.map +1 -0
- package/dist/license.d.ts +40 -0
- package/dist/license.d.ts.map +1 -0
- package/dist/license.js +122 -0
- package/dist/license.js.map +1 -0
- package/dist/llm-enhance.d.ts +69 -0
- package/dist/llm-enhance.d.ts.map +1 -0
- package/dist/llm-enhance.js +376 -0
- package/dist/llm-enhance.js.map +1 -0
- package/dist/matching/engine.d.ts +31 -0
- package/dist/matching/engine.d.ts.map +1 -0
- package/dist/matching/engine.js +51 -0
- package/dist/matching/engine.js.map +1 -0
- package/dist/matching/index.d.ts +8 -0
- package/dist/matching/index.d.ts.map +1 -0
- package/dist/matching/index.js +8 -0
- package/dist/matching/index.js.map +1 -0
- package/dist/matching/scoring.d.ts +84 -0
- package/dist/matching/scoring.d.ts.map +1 -0
- package/dist/matching/scoring.js +184 -0
- package/dist/matching/scoring.js.map +1 -0
- package/dist/models.d.ts +221 -0
- package/dist/models.d.ts.map +1 -0
- package/dist/models.js +28 -0
- package/dist/models.js.map +1 -0
- package/dist/requirements.d.ts +22 -0
- package/dist/requirements.d.ts.map +1 -0
- package/dist/requirements.js +30 -0
- package/dist/requirements.js.map +1 -0
- package/dist/resume-intel.d.ts +40 -0
- package/dist/resume-intel.d.ts.map +1 -0
- package/dist/resume-intel.js +111 -0
- package/dist/resume-intel.js.map +1 -0
- package/dist/sources.d.ts +32 -0
- package/dist/sources.d.ts.map +1 -0
- package/dist/sources.js +72 -0
- package/dist/sources.js.map +1 -0
- package/dist/tracking.d.ts +68 -0
- package/dist/tracking.d.ts.map +1 -0
- package/dist/tracking.js +140 -0
- package/dist/tracking.js.map +1 -0
- package/package.json +58 -0
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* resume-intel.ts — Section-aware resume intelligence builder.
|
|
3
|
+
*
|
|
4
|
+
* `buildResumeIntelligence()` extracts a weighted keyword/phrase corpus
|
|
5
|
+
* from multiple resume/profile inputs. Each source has a section weight
|
|
6
|
+
* (from SECTION_WEIGHTS) that determines how important its tokens are
|
|
7
|
+
* for gap analysis.
|
|
8
|
+
*
|
|
9
|
+
* Key design from PR-E: UserProfile.skills are injected as a synthetic
|
|
10
|
+
* "skills" section at weight 1.0 — the highest weight. This prevents
|
|
11
|
+
* skills the user explicitly listed from appearing as gaps.
|
|
12
|
+
*
|
|
13
|
+
* Output schema is JSON-compatible with Python careerclaw so
|
|
14
|
+
* `.careerclaw/resume_intel.json` files are portable across
|
|
15
|
+
* implementations.
|
|
16
|
+
*/
|
|
17
|
+
import { tokenizeUnique, extractPhrasesFromText, SECTION_WEIGHTS, } from "./core/text-processing.js";
|
|
18
|
+
// ---------------------------------------------------------------------------
|
|
19
|
+
// Public API
|
|
20
|
+
// ---------------------------------------------------------------------------
|
|
21
|
+
/**
|
|
22
|
+
* Build section-aware resume intelligence from available inputs.
|
|
23
|
+
*
|
|
24
|
+
* Sections processed (highest weight first):
|
|
25
|
+
* 1. skills (weight 1.0) — UserProfile.skills list
|
|
26
|
+
* 2. summary (weight 0.8) — resume_summary + target_roles
|
|
27
|
+
* 3. experience (weight 0.6) — resume_text (if provided)
|
|
28
|
+
*
|
|
29
|
+
* For each keyword, the final keyword_weight is the maximum weight
|
|
30
|
+
* across all sections in which it appeared.
|
|
31
|
+
*/
|
|
32
|
+
/**
 * Build section-aware resume intelligence from available inputs.
 *
 * Sections processed (highest weight first):
 *   1. skills     (weight 1.0) — UserProfile.skills list
 *   2. summary    (weight 0.8) — resume_summary + target_roles
 *   3. experience (weight 0.6) — resume_text (if provided)
 *
 * For each keyword, the final keyword weight is the maximum weight across
 * all sections in which it appeared. Phrase weights are conservative: a
 * phrase is only as strong as its weakest constituent token.
 */
export function buildResumeIntelligence(params) {
    const { resume_summary, resume_text, skills = [], target_roles = [] } = params;
    // Decide which source flag describes this run.
    const skillsProvided = skills.length > 0 || target_roles.length > 0;
    const resumeTextProvided = !!resume_text?.trim();
    let source;
    if (skillsProvided) {
        source = "skills_injected";
    }
    else if (resumeTextProvided) {
        source = "resume_text";
    }
    else {
        source = "summary_only";
    }
    // Assemble weighted token sections, highest weight first.
    const weightedSections = [];
    if (skills.length > 0) {
        weightedSections.push({
            tokens: tokenizeUnique(skills.join(" ")),
            weight: SECTION_WEIGHTS["skills"] ?? 1.0,
        });
    }
    const summaryCorpus = [resume_summary, ...target_roles].join(" ");
    if (summaryCorpus.trim()) {
        weightedSections.push({
            tokens: tokenizeUnique(summaryCorpus),
            weight: SECTION_WEIGHTS["summary"] ?? 0.8,
        });
    }
    if (resumeTextProvided) {
        weightedSections.push({
            tokens: tokenizeUnique(resume_text),
            weight: SECTION_WEIGHTS["experience"] ?? 0.6,
        });
    }
    // Fold every section into keyword weights (max wins) and record the
    // first-seen order in keyword_stream.
    const keyword_weights = {};
    const keyword_stream = [];
    for (const section of weightedSections) {
        for (const token of section.tokens) {
            const prior = keyword_weights[token] ?? 0;
            if (prior === 0) {
                keyword_stream.push(token); // token seen for the first time
            }
            keyword_weights[token] = Math.max(prior, section.weight);
        }
    }
    // extracted_keywords = unique set in insertion order.
    const extracted_keywords = [...new Set(keyword_stream)];
    // impact_signals = keywords from the high-weight sections (>= 0.8).
    const impact_signals = extracted_keywords.filter((kw) => (keyword_weights[kw] ?? 0) >= 0.8);
    // Phrase extraction runs over the combined text of every input.
    const combined = [
        skills.join(" "),
        target_roles.join(" "),
        resume_summary,
        resume_text ?? "",
    ]
        .filter(Boolean)
        .join(" ");
    const phrase_stream = extractPhrasesFromText(combined);
    const extracted_phrases = [...new Set(phrase_stream)];
    // Weight each phrase by its weakest component token (conservative).
    const phrase_weights = {};
    for (const phrase of extracted_phrases) {
        const componentWeights = phrase
            .split(" ")
            .map((part) => keyword_weights[part] ?? SECTION_WEIGHTS["other"] ?? 0.3);
        phrase_weights[phrase] = Math.min(...componentWeights);
    }
    return {
        extracted_keywords,
        extracted_phrases,
        keyword_stream,
        phrase_stream,
        impact_signals,
        keyword_weights,
        phrase_weights,
        source,
    };
}
|
|
111
|
+
//# sourceMappingURL=resume-intel.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"resume-intel.js","sourceRoot":"","sources":["../src/resume-intel.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AAGH,OAAO,EACL,cAAc,EACd,sBAAsB,EACtB,eAAe,GAChB,MAAM,2BAA2B,CAAC;AAiBnC,8EAA8E;AAC9E,aAAa;AACb,8EAA8E;AAE9E;;;;;;;;;;GAUG;AACH,MAAM,UAAU,uBAAuB,CACrC,MAAgC;IAEhC,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,MAAM,GAAG,EAAE,EAAE,YAAY,GAAG,EAAE,EAAE,GAAG,MAAM,CAAC;IAE/E,wBAAwB;IACxB,MAAM,SAAS,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;IAC/D,MAAM,aAAa,GAAG,CAAC,CAAC,WAAW,EAAE,IAAI,EAAE,CAAC;IAC5C,MAAM,MAAM,GAAiC,SAAS;QACpD,CAAC,CAAC,iBAAiB;QACnB,CAAC,CAAC,aAAa;YACf,CAAC,CAAC,aAAa;YACf,CAAC,CAAC,cAAc,CAAC;IAEnB,6CAA6C;IAC7C,MAAM,QAAQ,GAAgD,EAAE,CAAC;IAEjE,iCAAiC;IACjC,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACtB,QAAQ,CAAC,IAAI,CAAC;YACZ,MAAM,EAAE,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YACxC,MAAM,EAAE,eAAe,CAAC,QAAQ,CAAC,IAAI,GAAG;SACzC,CAAC,CAAC;IACL,CAAC;IAED,iDAAiD;IACjD,MAAM,WAAW,GAAG,CAAC,cAAc,EAAE,GAAG,YAAY,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAChE,IAAI,WAAW,CAAC,IAAI,EAAE,EAAE,CAAC;QACvB,QAAQ,CAAC,IAAI,CAAC;YACZ,MAAM,EAAE,cAAc,CAAC,WAAW,CAAC;YACnC,MAAM,EAAE,eAAe,CAAC,SAAS,CAAC,IAAI,GAAG;SAC1C,CAAC,CAAC;IACL,CAAC;IAED,2CAA2C;IAC3C,IAAI,aAAa,EAAE,CAAC;QAClB,QAAQ,CAAC,IAAI,CAAC;YACZ,MAAM,EAAE,cAAc,CAAC,WAAY,CAAC;YACpC,MAAM,EAAE,eAAe,CAAC,YAAY,CAAC,IAAI,GAAG;SAC7C,CAAC,CAAC;IACL,CAAC;IAED,oDAAoD;IACpD,MAAM,eAAe,GAA2B,EAAE,CAAC;IACnD,MAAM,cAAc,GAAa,EAAE,CAAC;IAEpC,KAAK,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,QAAQ,EAAE,CAAC;QAC1C,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YAC3B,MAAM,QAAQ,GAAG,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;YAC7C,IAAI,QAAQ,KAAK,CAAC;gBAAE,cAAc,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,mBAAmB;YACnE,eAAe,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QACtD,CAAC;IACH,CAAC;IAED,oDAAoD;IACpD,MAAM,kBAAkB,GAAG,CAAC,GAAG,IAAI,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC;IAExD,0EAA0E;IAC1E,MAAM,cAAc,GAAG,kBAAkB,CAAC,MAAM,CAC9C,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,GAAG,CACxC,CAAC;IAEF,sDAAsD;IACt
D,MAAM,OAAO,GAAG;QACd,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC;QAChB,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QACtB,cAAc;QACd,WAAW,IAAI,EAAE;KAClB;SACE,MAAM,CAAC,OAAO,CAAC;SACf,IAAI,CAAC,GAAG,CAAC,CAAC;IAEb,MAAM,aAAa,GAAG,sBAAsB,CAAC,OAAO,CAAC,CAAC;IACtD,MAAM,iBAAiB,GAAG,CAAC,GAAG,IAAI,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC;IAEtD,qEAAqE;IACrE,mEAAmE;IACnE,MAAM,cAAc,GAA2B,EAAE,CAAC;IAClD,KAAK,MAAM,MAAM,IAAI,iBAAiB,EAAE,CAAC;QACvC,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAChC,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CACxB,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,eAAe,CAAC,CAAC,CAAC,IAAI,eAAe,CAAC,OAAO,CAAC,IAAI,GAAG,CAAC,CAC3E,CAAC;QACF,cAAc,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC;IACrC,CAAC;IAED,OAAO;QACL,kBAAkB;QAClB,iBAAiB;QACjB,cAAc;QACd,aAAa;QACb,cAAc;QACd,eAAe;QACf,cAAc;QACd,MAAM;KACP,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* sources.ts — Source aggregation layer.
|
|
3
|
+
*
|
|
4
|
+
* `fetchAllJobs()` is the single entry point for the briefing pipeline.
|
|
5
|
+
* It calls both adapters independently (per-source error isolation), merges
|
|
6
|
+
* results, and deduplicates by `job_id` (first-seen wins).
|
|
7
|
+
*
|
|
8
|
+
* Downstream layers (matching engine, gap analysis) are source-agnostic —
|
|
9
|
+
* they only see `NormalizedJob[]`.
|
|
10
|
+
*/
|
|
11
|
+
import type { NormalizedJob, JobSource } from "./models.js";
|
|
12
|
+
export interface FetchResult {
    /** Merged jobs, deduplicated by `job_id` (see `deduplicate`). */
    jobs: NormalizedJob[];
    /** Per-source job counts for run instrumentation. */
    counts: Partial<Record<JobSource, number>>;
    /** Per-source errors — non-empty means a source was degraded. */
    errors: Partial<Record<JobSource, string>>;
}
/**
 * Fetch jobs from all configured sources and return a deduplicated list.
 *
 * Failures in individual sources are caught and recorded in `errors` —
 * the pipeline continues with whatever sources succeeded. This mirrors
 * the Python careerclaw per-source resilience pattern.
 */
export declare function fetchAllJobs(): Promise<FetchResult>;
/**
 * Deduplicate a list of jobs by `job_id`.
 * First-seen wins — preserves RemoteOK order before HN order.
 */
export declare function deduplicate(jobs: NormalizedJob[]): NormalizedJob[];
//# sourceMappingURL=sources.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sources.d.ts","sourceRoot":"","sources":["../src/sources.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,OAAO,KAAK,EAAE,aAAa,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAQ5D,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,aAAa,EAAE,CAAC;IACtB,qDAAqD;IACrD,MAAM,EAAE,OAAO,CAAC,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC,CAAC;IAC3C,iEAAiE;IACjE,MAAM,EAAE,OAAO,CAAC,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC,CAAC;CAC5C;AAMD;;;;;;GAMG;AACH,wBAAsB,YAAY,IAAI,OAAO,CAAC,WAAW,CAAC,CAgCzD;AAMD;;;GAGG;AACH,wBAAgB,WAAW,CAAC,IAAI,EAAE,aAAa,EAAE,GAAG,aAAa,EAAE,CAUlE"}
|
package/dist/sources.js
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* sources.ts — Source aggregation layer.
|
|
3
|
+
*
|
|
4
|
+
* `fetchAllJobs()` is the single entry point for the briefing pipeline.
|
|
5
|
+
* It calls both adapters independently (per-source error isolation), merges
|
|
6
|
+
* results, and deduplicates by `job_id` (first-seen wins).
|
|
7
|
+
*
|
|
8
|
+
* Downstream layers (matching engine, gap analysis) are source-agnostic —
|
|
9
|
+
* they only see `NormalizedJob[]`.
|
|
10
|
+
*/
|
|
11
|
+
import { fetchRemoteOkJobs } from "./adapters/remoteok.js";
|
|
12
|
+
import { fetchHnJobs } from "./adapters/hackernews.js";
|
|
13
|
+
// ---------------------------------------------------------------------------
|
|
14
|
+
// Public API
|
|
15
|
+
// ---------------------------------------------------------------------------
|
|
16
|
+
/**
|
|
17
|
+
* Fetch jobs from all configured sources and return a deduplicated list.
|
|
18
|
+
*
|
|
19
|
+
* Failures in individual sources are caught and recorded in `errors` —
|
|
20
|
+
* the pipeline continues with whatever sources succeeded. This mirrors
|
|
21
|
+
* the Python careerclaw per-source resilience pattern.
|
|
22
|
+
*/
|
|
23
|
+
/**
 * Fetch jobs from all configured sources and return a deduplicated list.
 *
 * Failures in individual sources are caught and recorded in `errors` —
 * the pipeline continues with whatever sources succeeded. This mirrors
 * the Python careerclaw per-source resilience pattern.
 */
export async function fetchAllJobs() {
    const counts = {};
    const errors = {};
    const merged = [];
    // Both adapters run concurrently; allSettled isolates failures per source.
    const outcomes = await Promise.allSettled([
        fetchRemoteOkJobs(),
        fetchHnJobs(),
    ]);
    // Index order matches the allSettled array above.
    const sourceNames = ["remoteok", "hackernews"];
    outcomes.forEach((outcome, i) => {
        const name = sourceNames[i];
        if (outcome.status === "fulfilled") {
            counts[name] = outcome.value.length;
            merged.push(...outcome.value);
        }
        else {
            errors[name] = String(outcome.reason);
            counts[name] = 0;
        }
    });
    return {
        jobs: deduplicate(merged),
        counts,
        errors,
    };
}
|
|
54
|
+
// ---------------------------------------------------------------------------
|
|
55
|
+
// Deduplication
|
|
56
|
+
// ---------------------------------------------------------------------------
|
|
57
|
+
/**
|
|
58
|
+
* Deduplicate a list of jobs by `job_id`.
|
|
59
|
+
* First-seen wins — preserves RemoteOK order before HN order.
|
|
60
|
+
*/
|
|
61
|
+
/**
 * Deduplicate a list of jobs by `job_id`.
 * First-seen wins — preserves RemoteOK order before HN order.
 */
export function deduplicate(jobs) {
    const seenIds = new Set();
    return jobs.filter((job) => {
        if (seenIds.has(job.job_id)) {
            return false; // later duplicate — drop it
        }
        seenIds.add(job.job_id);
        return true;
    });
}
|
|
72
|
+
//# sourceMappingURL=sources.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sources.js","sourceRoot":"","sources":["../src/sources.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAGH,OAAO,EAAE,iBAAiB,EAAE,MAAM,wBAAwB,CAAC;AAC3D,OAAO,EAAE,WAAW,EAAE,MAAM,0BAA0B,CAAC;AAcvD,8EAA8E;AAC9E,aAAa;AACb,8EAA8E;AAE9E;;;;;;GAMG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY;IAChC,MAAM,MAAM,GAAuC,EAAE,CAAC;IACtD,MAAM,MAAM,GAAuC,EAAE,CAAC;IACtD,MAAM,OAAO,GAAoB,EAAE,CAAC;IAEpC,8DAA8D;IAC9D,MAAM,CAAC,cAAc,EAAE,QAAQ,CAAC,GAAG,MAAM,OAAO,CAAC,UAAU,CAAC;QAC1D,iBAAiB,EAAE;QACnB,WAAW,EAAE;KACd,CAAC,CAAC;IAEH,IAAI,cAAc,CAAC,MAAM,KAAK,WAAW,EAAE,CAAC;QAC1C,MAAM,CAAC,UAAU,CAAC,GAAG,cAAc,CAAC,KAAK,CAAC,MAAM,CAAC;QACjD,OAAO,CAAC,IAAI,CAAC,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;IACxC,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,UAAU,CAAC,GAAG,MAAM,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;QACnD,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IACzB,CAAC;IAED,IAAI,QAAQ,CAAC,MAAM,KAAK,WAAW,EAAE,CAAC;QACpC,MAAM,CAAC,YAAY,CAAC,GAAG,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC;QAC7C,OAAO,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;IAClC,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;QAC/C,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC;IAC3B,CAAC;IAED,OAAO;QACL,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC;QAC1B,MAAM;QACN,MAAM;KACP,CAAC;AACJ,CAAC;AAED,8EAA8E;AAC9E,gBAAgB;AAChB,8EAA8E;AAE9E;;;GAGG;AACH,MAAM,UAAU,WAAW,CAAC,IAAqB;IAC/C,MAAM,IAAI,GAAG,IAAI,GAAG,EAAU,CAAC;IAC/B,MAAM,MAAM,GAAoB,EAAE,CAAC;IACnC,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE,CAAC;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;YAC1B,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YACrB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QACnB,CAAC;IACH,CAAC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC"}
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* tracking.ts — Persistence layer for tracking.json and runs.jsonl.
|
|
3
|
+
*
|
|
4
|
+
* `TrackingRepository` manages two runtime files under `.careerclaw/`:
|
|
5
|
+
*
|
|
6
|
+
* tracking.json — keyed object of TrackingEntry records, one per
|
|
7
|
+
* saved job_id. First-seen wins for status; re-runs
|
|
8
|
+
* update `last_seen_at` only.
|
|
9
|
+
*
|
|
10
|
+
* runs.jsonl — append-only newline-delimited JSON log; one
|
|
11
|
+
* BriefingRun per line. Never rewritten, only grown.
|
|
12
|
+
*
|
|
13
|
+
* Dry-run mode: all write operations are no-ops. The repository is
|
|
14
|
+
* fully functional for reads; callers can inspect what *would* have
|
|
15
|
+
* been written via the return values of upsertEntries() / appendRun().
|
|
16
|
+
*
|
|
17
|
+
* The runtime directory is created automatically on first write if it
|
|
18
|
+
* does not exist.
|
|
19
|
+
*/
|
|
20
|
+
import type { TrackingEntry, BriefingRun, NormalizedJob, ScoredJob } from "./models.js";
|
|
21
|
+
/** In-memory store: job_id → TrackingEntry. */
|
|
22
|
+
/** In-memory store: job_id → TrackingEntry. */
type TrackingStore = Record<string, TrackingEntry>;
export interface UpsertResult {
    /** Number of new entries created this call. */
    created: number;
    /** Number of entries already present (last_seen_at updated). */
    already_present: number;
}
export declare class TrackingRepository {
    private readonly trackingPath;
    private readonly runsPath;
    private readonly dryRun;
    constructor(options?: {
        trackingPath?: string;
        runsPath?: string;
        dryRun?: boolean;
    });
    /**
     * Load tracking.json from disk.
     * Returns an empty store when the file does not yet exist.
     */
    load(): TrackingStore;
    /**
     * Persist the in-memory store to tracking.json.
     * No-op in dry-run mode.
     */
    save(store: TrackingStore): void;
    /**
     * Upsert a batch of jobs into the tracking store.
     *
     * Behaviour:
     *   - New job_id      → creates a TrackingEntry with status "saved"
     *   - Existing job_id → updates last_seen_at only; status is preserved
     *
     * NOTE(review): `scored` is accepted but not read by the current
     * implementation — confirm before relying on it.
     *
     * Writes the updated store to disk unless dry-run.
     * Returns counts of created vs already_present entries.
     */
    upsertEntries(jobs: NormalizedJob[], scored?: ScoredJob[]): UpsertResult;
    /**
     * Append a BriefingRun record to runs.jsonl.
     * Each call adds exactly one newline-terminated JSON line.
     * No-op in dry-run mode.
     */
    appendRun(run: BriefingRun): void;
    private ensureDir;
}
export {};
//# sourceMappingURL=tracking.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"tracking.d.ts","sourceRoot":"","sources":["../src/tracking.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;GAkBG;AAIH,OAAO,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE,aAAa,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAOxF,+CAA+C;AAC/C,KAAK,aAAa,GAAG,MAAM,CAAC,MAAM,EAAE,aAAa,CAAC,CAAC;AAEnD,MAAM,WAAW,YAAY;IAC3B,+CAA+C;IAC/C,OAAO,EAAE,MAAM,CAAC;IAChB,gEAAgE;IAChE,eAAe,EAAE,MAAM,CAAC;CACzB;AAMD,qBAAa,kBAAkB;IAC7B,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAS;IACtC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAS;IAClC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAU;gBAErB,OAAO,CAAC,EAAE;QACpB,YAAY,CAAC,EAAE,MAAM,CAAC;QACtB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,MAAM,CAAC,EAAE,OAAO,CAAC;KAClB;IAUD;;;OAGG;IACH,IAAI,IAAI,aAAa;IAerB;;;OAGG;IACH,IAAI,CAAC,KAAK,EAAE,aAAa,GAAG,IAAI;IAMhC;;;;;;;;;OASG;IACH,aAAa,CACX,IAAI,EAAE,aAAa,EAAE,EACrB,MAAM,CAAC,EAAE,SAAS,EAAE,GACnB,YAAY;IA2Bf;;;;OAIG;IACH,SAAS,CAAC,GAAG,EAAE,WAAW,GAAG,IAAI;IAUjC,OAAO,CAAC,SAAS;CAMlB"}
|
package/dist/tracking.js
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* tracking.ts — Persistence layer for tracking.json and runs.jsonl.
|
|
3
|
+
*
|
|
4
|
+
* `TrackingRepository` manages two runtime files under `.careerclaw/`:
|
|
5
|
+
*
|
|
6
|
+
* tracking.json — keyed object of TrackingEntry records, one per
|
|
7
|
+
* saved job_id. First-seen wins for status; re-runs
|
|
8
|
+
* update `last_seen_at` only.
|
|
9
|
+
*
|
|
10
|
+
* runs.jsonl — append-only newline-delimited JSON log; one
|
|
11
|
+
* BriefingRun per line. Never rewritten, only grown.
|
|
12
|
+
*
|
|
13
|
+
* Dry-run mode: all write operations are no-ops. The repository is
|
|
14
|
+
* fully functional for reads; callers can inspect what *would* have
|
|
15
|
+
* been written via the return values of upsertEntries() / appendRun().
|
|
16
|
+
*
|
|
17
|
+
* The runtime directory is created automatically on first write if it
|
|
18
|
+
* does not exist.
|
|
19
|
+
*/
|
|
20
|
+
import { readFileSync, writeFileSync, appendFileSync, mkdirSync, existsSync } from "fs";
|
|
21
|
+
import { dirname } from "path";
|
|
22
|
+
import { TRACKING_PATH, RUNS_PATH } from "./config.js";
|
|
23
|
+
// ---------------------------------------------------------------------------
|
|
24
|
+
// TrackingRepository
|
|
25
|
+
// ---------------------------------------------------------------------------
|
|
26
|
+
/**
 * Repository over the two runtime files: tracking.json (keyed store of
 * TrackingEntry records) and runs.jsonl (append-only run log).
 * All write methods become no-ops when constructed with `dryRun: true`.
 */
export class TrackingRepository {
    trackingPath;
    runsPath;
    dryRun;
    /**
     * @param {{trackingPath?: string, runsPath?: string, dryRun?: boolean}} [options]
     *   Paths default to the configured TRACKING_PATH / RUNS_PATH constants.
     */
    constructor(options) {
        const { trackingPath, runsPath, dryRun } = options ?? {};
        this.trackingPath = trackingPath ?? TRACKING_PATH;
        this.runsPath = runsPath ?? RUNS_PATH;
        this.dryRun = dryRun ?? false;
    }
    /**
     * Load tracking.json from disk.
     * Returns an empty store when the file does not yet exist.
     */
    load() {
        if (!existsSync(this.trackingPath)) {
            return {};
        }
        try {
            return JSON.parse(readFileSync(this.trackingPath, "utf8"));
        }
        catch {
            // Corrupt or unreadable file — start fresh rather than crashing.
            return {};
        }
    }
    /**
     * Persist the in-memory store to tracking.json (pretty-printed).
     * No-op in dry-run mode.
     */
    save(store) {
        if (this.dryRun) {
            return;
        }
        this.ensureDir(this.trackingPath);
        writeFileSync(this.trackingPath, JSON.stringify(store, null, 2), "utf8");
    }
    /**
     * Upsert a batch of jobs into the tracking store.
     *
     * New job_id      → creates a TrackingEntry with status "saved".
     * Existing job_id → updates last_seen_at/updated_at only; status kept.
     *
     * Writes the updated store to disk unless dry-run.
     * Returns counts of created vs already_present entries.
     */
    upsertEntries(jobs, scored) {
        const store = this.load();
        const timestamp = new Date().toISOString();
        const tally = { created: 0, already_present: 0 };
        for (const job of jobs) {
            const entry = store[job.job_id];
            if (entry) {
                // Re-seen job: refresh timestamps, preserve everything else.
                entry.last_seen_at = timestamp;
                entry.updated_at = timestamp;
                tally.already_present += 1;
            }
            else {
                store[job.job_id] = makeEntry(job, timestamp);
                tally.created += 1;
            }
        }
        this.save(store);
        return tally;
    }
    /**
     * Append a BriefingRun record to runs.jsonl.
     * Each call adds exactly one newline-terminated JSON line.
     * No-op in dry-run mode.
     */
    appendRun(run) {
        if (this.dryRun) {
            return;
        }
        this.ensureDir(this.runsPath);
        appendFileSync(this.runsPath, JSON.stringify(run) + "\n", "utf8");
    }
    /** Create the parent directory of `filePath` if it is missing. */
    ensureDir(filePath) {
        const parent = dirname(filePath);
        if (!existsSync(parent)) {
            mkdirSync(parent, { recursive: true });
        }
    }
}
|
|
122
|
+
// ---------------------------------------------------------------------------
|
|
123
|
+
// Factory helpers
|
|
124
|
+
// ---------------------------------------------------------------------------
|
|
125
|
+
/**
 * Build a fresh TrackingEntry for a job first seen at `now`.
 * New entries always start in status "saved" with null applied_at/notes;
 * all four timestamps are initialised to the same ISO string.
 */
function makeEntry(job, now) {
    const { job_id, title, company, url, source } = job;
    return {
        job_id,
        status: "saved",
        title,
        company,
        url,
        source,
        saved_at: now,
        applied_at: null,
        updated_at: now,
        last_seen_at: now,
        notes: null,
    };
}
|
|
140
|
+
//# sourceMappingURL=tracking.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"tracking.js","sourceRoot":"","sources":["../src/tracking.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;GAkBG;AAEH,OAAO,EAAE,YAAY,EAAE,aAAa,EAAE,cAAc,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,IAAI,CAAC;AACxF,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AAE/B,OAAO,EAAE,aAAa,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAgBvD,8EAA8E;AAC9E,qBAAqB;AACrB,8EAA8E;AAE9E,MAAM,OAAO,kBAAkB;IACZ,YAAY,CAAS;IACrB,QAAQ,CAAS;IACjB,MAAM,CAAU;IAEjC,YAAY,OAIX;QACC,IAAI,CAAC,YAAY,GAAG,OAAO,EAAE,YAAY,IAAI,aAAa,CAAC;QAC3D,IAAI,CAAC,QAAQ,GAAG,OAAO,EAAE,QAAQ,IAAI,SAAS,CAAC;QAC/C,IAAI,CAAC,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,KAAK,CAAC;IACzC,CAAC;IAED,4EAA4E;IAC5E,OAAO;IACP,4EAA4E;IAE5E;;;OAGG;IACH,IAAI;QACF,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,YAAY,CAAC;YAAE,OAAO,EAAE,CAAC;QAC9C,IAAI,CAAC;YACH,MAAM,GAAG,GAAG,YAAY,CAAC,IAAI,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC;YACpD,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,CAAkB,CAAC;QAC1C,CAAC;QAAC,MAAM,CAAC;YACP,iEAAiE;YACjE,OAAO,EAAE,CAAC;QACZ,CAAC;IACH,CAAC;IAED,4EAA4E;IAC5E,wBAAwB;IACxB,4EAA4E;IAE5E;;;OAGG;IACH,IAAI,CAAC,KAAoB;QACvB,IAAI,IAAI,CAAC,MAAM;YAAE,OAAO;QACxB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAClC,aAAa,CAAC,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC;IAC3E,CAAC;IAED;;;;;;;;;OASG;IACH,aAAa,CACX,IAAqB,EACrB,MAAoB;QAEpB,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;QAC1B,MAAM,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;QACrC,IAAI,OAAO,GAAG,CAAC,CAAC;QAChB,IAAI,eAAe,GAAG,CAAC,CAAC;QAExB,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE,CAAC;YACvB,MAAM,QAAQ,GAAG,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YACnC,IAAI,QAAQ,EAAE,CAAC;gBACb,mEAAmE;gBACnE,QAAQ,CAAC,YAAY,GAAG,GAAG,CAAC;gBAC5B,QAAQ,CAAC,UAAU,GAAG,GAAG,CAAC;gBAC1B,eAAe,EAAE,CAAC;YACpB,CAAC;iBAAM,CAAC;gBACN,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;gBACxC,OAAO,EAAE,CAAC;YACZ,CAAC;QACH,CAAC;QAED,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACjB,OAAO,EAAE,OAAO,EAAE,eAAe,EAAE,CAAC;IACtC,CAAC;IAED,4EAA4E;IAC5E,qBAAqB;IACrB,4EAA4E;IAE5E;;;;OAIG;IACH,SAAS,CAAC,GAAgB;QACxB,IAAI,IAAI,CAAC,MAAM;YAAE,O
AAO;QACxB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC9B,cAAc,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,IAAI,EAAE,MAAM,CAAC,CAAC;IACpE,CAAC;IAED,4EAA4E;IAC5E,UAAU;IACV,4EAA4E;IAEpE,SAAS,CAAC,QAAgB;QAChC,MAAM,GAAG,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;QAC9B,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;YACrB,SAAS,CAAC,GAAG,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QACtC,CAAC;IACH,CAAC;CACF;AAED,8EAA8E;AAC9E,kBAAkB;AAClB,8EAA8E;AAE9E,SAAS,SAAS,CAAC,GAAkB,EAAE,GAAW;IAChD,OAAO;QACL,MAAM,EAAE,GAAG,CAAC,MAAM;QAClB,MAAM,EAAE,OAAO;QACf,KAAK,EAAE,GAAG,CAAC,KAAK;QAChB,OAAO,EAAE,GAAG,CAAC,OAAO;QACpB,GAAG,EAAE,GAAG,CAAC,GAAG;QACZ,MAAM,EAAE,GAAG,CAAC,MAAM;QAClB,QAAQ,EAAE,GAAG;QACb,UAAU,EAAE,IAAI;QAChB,UAAU,EAAE,GAAG;QACf,YAAY,EAAE,GAAG;QACjB,KAAK,EAAE,IAAI;KACZ,CAAC;AACJ,CAAC"}
|
package/package.json
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "careerclaw-js",
|
|
3
|
+
"version": "0.11.0",
|
|
4
|
+
"description": "AI-powered job search automation for OpenClaw — Node.js rewrite of CareerClaw",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "dist/index.js",
|
|
7
|
+
"types": "dist/index.d.ts",
|
|
8
|
+
"bin": {
|
|
9
|
+
"careerclaw-js": "dist/cli.js"
|
|
10
|
+
},
|
|
11
|
+
"scripts": {
|
|
12
|
+
"build": "tsc",
|
|
13
|
+
"dev": "tsc --watch",
|
|
14
|
+
"test": "vitest run",
|
|
15
|
+
"test:watch": "vitest",
|
|
16
|
+
"test:coverage": "vitest run --coverage",
|
|
17
|
+
"lint": "tsc --noEmit",
|
|
18
|
+
"clean": "rm -rf dist",
|
|
19
|
+
"smoke:sources": "npx tsx scripts/smoke_sources.ts",
|
|
20
|
+
"smoke:briefing": "npx tsx scripts/smoke_briefing.ts",
|
|
21
|
+
"smoke:llm": "npx tsx --env-file=.env scripts/smoke_llm.ts",
|
|
22
|
+
"debugging:license": "npx tsx --env-file=.env scripts/debug_license.ts",
|
|
23
|
+
"debugging:pro": "npx tsx --env-file=.env scripts/debug_pro_gate.ts",
|
|
24
|
+
"debugging:llm-response": "npx tsx --env-file=.env scripts/debug_llm_response.ts",
|
|
25
|
+
"cli": "tsx src/cli.ts --profile .careerclaw/profile.json --resume-txt .careerclaw/resume.txt --dry-run",
|
|
26
|
+
"cli:enhanced": "npx tsx --env-file=.env src/cli.ts --json --dry-run"
|
|
27
|
+
},
|
|
28
|
+
"files": [
|
|
29
|
+
"dist",
|
|
30
|
+
"SKILL.md",
|
|
31
|
+
"README.md",
|
|
32
|
+
"CHANGELOG.md",
|
|
33
|
+
"SECURITY.md"
|
|
34
|
+
],
|
|
35
|
+
"keywords": [
|
|
36
|
+
"careerclaw",
|
|
37
|
+
"openclaw",
|
|
38
|
+
"clawhub",
|
|
39
|
+
"job-search",
|
|
40
|
+
"automation",
|
|
41
|
+
"ai"
|
|
42
|
+
],
|
|
43
|
+
"author": "Orestes Garcia Martinez",
|
|
44
|
+
"license": "MIT",
|
|
45
|
+
"engines": {
|
|
46
|
+
"node": ">=20.0.0"
|
|
47
|
+
},
|
|
48
|
+
"devDependencies": {
|
|
49
|
+
"@types/node": "^22.0.0",
|
|
50
|
+
"@vitest/coverage-v8": "^2.1.9",
|
|
51
|
+
"tsx": "^4.21.0",
|
|
52
|
+
"typescript": "^5.5.0",
|
|
53
|
+
"vitest": "^2.0.0"
|
|
54
|
+
},
|
|
55
|
+
"dependencies": {
|
|
56
|
+
"fast-xml-parser": "^5.4.2"
|
|
57
|
+
}
|
|
58
|
+
}
|