@promptowl/contextnest-cli 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +90 -0
- package/dist/chunk-ISTWO5D6.js +288 -0
- package/dist/index.js +33353 -66
- package/dist/parser-ZUZNXYQ4-TST5HLOG.js +9 -0
- package/package.json +3 -2
package/LICENSE
CHANGED
package/README.md
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
# @promptowl/contextnest-cli
|
|
2
|
+
|
|
3
|
+
Command-line tool for [Context Nest](https://github.com/PromptOwl/context-nest) — structured, versioned context vaults for AI agents.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install -g @promptowl/contextnest-cli
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Quick Start
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
# Initialize a vault with a starter recipe
|
|
15
|
+
ctx init --starter developer
|
|
16
|
+
|
|
17
|
+
# See all available starters
|
|
18
|
+
ctx init --list-starters
|
|
19
|
+
|
|
20
|
+
# Or initialize an empty vault and build it yourself
|
|
21
|
+
ctx init --name "My Vault"
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
### Available Starters
|
|
25
|
+
|
|
26
|
+
| Recipe | For | What You Get |
|
|
27
|
+
|--------|-----|-------------|
|
|
28
|
+
| `developer` | Engineering teams | Architecture, API reference, dev setup |
|
|
29
|
+
| `executive` | Leadership | Strategic vision, market landscape, decision log |
|
|
30
|
+
| `analyst` | Research / OSINT | Case files, source registry, methodology |
|
|
31
|
+
| `team` | General teams | Handbook, onboarding guide, runbook |
|
|
32
|
+
|
|
33
|
+
## Commands
|
|
34
|
+
|
|
35
|
+
### Document Management
|
|
36
|
+
- `ctx add <path>` — Create a new document
|
|
37
|
+
- `ctx update <path>` — Update a document
|
|
38
|
+
- `ctx delete <path>` — Delete a document
|
|
39
|
+
- `ctx publish <path>` — Publish (bump version, create checkpoint)
|
|
40
|
+
- `ctx validate [path]` — Validate against the spec
|
|
41
|
+
- `ctx list` — List documents (filter by `--type`, `--status`, `--tag`)
|
|
42
|
+
- `ctx search <query>` — Full-text search
|
|
43
|
+
|
|
44
|
+
### Context Injection
|
|
45
|
+
- `ctx inject <selector>` — Resolve context for AI agent consumption
|
|
46
|
+
- `ctx inject @org/pack` — Inject from a cloud-hosted pack
|
|
47
|
+
- `ctx resolve <selector>` — Execute a selector query
|
|
48
|
+
|
|
49
|
+
### Versioning & Integrity
|
|
50
|
+
- `ctx history <path>` — Show version history
|
|
51
|
+
- `ctx reconstruct <path> <version>` — Reconstruct a specific version
|
|
52
|
+
- `ctx verify` — Verify all hash chains
|
|
53
|
+
|
|
54
|
+
### Packs & Checkpoints
|
|
55
|
+
- `ctx pack list` — List context packs
|
|
56
|
+
- `ctx pack show <id>` — Show pack details
|
|
57
|
+
- `ctx checkpoint list` — List checkpoints
|
|
58
|
+
- `ctx checkpoint rebuild` — Rebuild checkpoint history
|
|
59
|
+
|
|
60
|
+
### Index
|
|
61
|
+
- `ctx index` — Regenerate context.yaml and INDEX.md files
|
|
62
|
+
|
|
63
|
+
## Selectors
|
|
64
|
+
|
|
65
|
+
```bash
|
|
66
|
+
ctx inject "tag:#engineering" # All docs with a tag
|
|
67
|
+
ctx inject "type:document" # All docs of a type
|
|
68
|
+
ctx inject "path:nodes/api-*" # Glob match
|
|
69
|
+
ctx inject "pack:engineering-essentials" # All docs in a pack
|
|
70
|
+
ctx inject "status:published" # By status
|
|
71
|
+
ctx inject "tag:#api | tag:#v2"          # Union
|
|
72
|
+
ctx inject "tag:#api + status:published" # Intersection
|
|
73
|
+
```
|
|
74
|
+
|
|
75
|
+
## Cloud Packs
|
|
76
|
+
|
|
77
|
+
Inject context from cloud-hosted packs without downloading source files:
|
|
78
|
+
|
|
79
|
+
```bash
|
|
80
|
+
ctx inject @promptowl/executive-ai-strategy
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
## Links
|
|
84
|
+
|
|
85
|
+
- [Context Nest repo](https://github.com/PromptOwl/context-nest)
|
|
86
|
+
- [PromptOwl](https://promptowl.com)
|
|
87
|
+
|
|
88
|
+
## License
|
|
89
|
+
|
|
90
|
+
Apache-2.0
|
|
@@ -0,0 +1,288 @@
|
|
|
1
|
+
#!/usr/bin/env node
// esbuild-generated preamble: CommonJS/ESM interop shims shared by this bundle.
import { createRequire as __createRequire } from "node:module";
// Recreate `require` for this ESM entry so bundled CommonJS deps can still load.
const require = __createRequire(import.meta.url);
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Dynamic-require shim: use the real `require` when present, otherwise a Proxy
// that defers the check to property access; throws for dynamic requires when
// no `require` implementation exists at call time.
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
  if (typeof require !== "undefined") return require.apply(this, arguments);
  throw Error('Dynamic require of "' + x + '" is not supported');
});
// Runs a bundled CommonJS module factory once on first call and caches its
// `module.exports` for subsequent calls.
var __commonJS = (cb, mod) => function __require2() {
  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
// Defines live getter re-exports on `target` for every key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except` and
// keys `to` already owns; preserves enumerability from the source descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a CommonJS export object so it can be consumed as an ES module,
// synthesizing a `default` binding when needed.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
|
|
39
|
+
|
|
40
|
+
// ../engine/dist/chunk-ETTJRGPY.js
/**
 * Lexes a Context Nest selector string into a flat token list.
 *
 * Recognised tokens: the operators "+" (AND), "|" (OR), "-" (NOT), parens,
 * "#name" tags, bare or quoted "contextnest://" URIs, quoted strings
 * (re-classified as URI / TAG / PACK by their prefix), and "word:value"
 * filters (type:, status:, transport:, server:, pack:).
 *
 * NOTE(review): the README also advertises "tag:" and "path:" selectors;
 * this lexer does not recognise those words, so presumably the CLI rewrites
 * them before they reach the engine — confirm against the caller.
 *
 * @param {string} input - raw selector text
 * @returns {{type: string, value: string, position: number}[]} tokens,
 *   always terminated by an EOF token
 * @throws {Error} on an empty tag name, an unknown filter word, an
 *   unterminated quoted string, or any unexpected character
 */
function tokenize(input) {
  const tokens = [];
  let pos = 0;
  function skipWhitespace() {
    while (pos < input.length && /\s/.test(input[pos])) pos++;
  }
  while (pos < input.length) {
    skipWhitespace();
    if (pos >= input.length) break;
    const start = pos;
    const ch = input[pos];
    // Single-character operators and grouping.
    if (ch === "+") {
      tokens.push({ type: "AND", value: "+", position: start });
      pos++;
      continue;
    }
    if (ch === "|") {
      tokens.push({ type: "OR", value: "|", position: start });
      pos++;
      continue;
    }
    if (ch === "-") {
      tokens.push({ type: "NOT", value: "-", position: start });
      pos++;
      continue;
    }
    if (ch === "(") {
      tokens.push({ type: "LPAREN", value: "(", position: start });
      pos++;
      continue;
    }
    if (ch === ")") {
      tokens.push({ type: "RPAREN", value: ")", position: start });
      pos++;
      continue;
    }
    // "#name" — tag atom; the name must be non-empty.
    if (ch === "#") {
      pos++;
      const tagStart = pos;
      while (pos < input.length && /[a-zA-Z0-9_-]/.test(input[pos])) pos++;
      const tagValue = input.slice(tagStart, pos);
      if (!tagValue) {
        throw new Error(`Invalid tag at position ${start}: expected tag name after #`);
      }
      tokens.push({ type: "TAG", value: tagValue, position: start });
      continue;
    }
    // Bare contextnest:// URI — runs until whitespace or an operator character.
    if (input.slice(pos).startsWith("contextnest://")) {
      const uriStart = pos;
      pos += "contextnest://".length;
      while (pos < input.length && !/[\s+|\-()]/.test(input[pos])) pos++;
      tokens.push({ type: "URI", value: input.slice(uriStart, pos), position: uriStart });
      continue;
    }
    // Quoted string: classified by prefix as URI, TAG, or PACK.
    if (ch === '"') {
      pos++;
      const strStart = pos;
      while (pos < input.length && input[pos] !== '"') pos++;
      if (pos >= input.length) {
        // Fix: a missing closing quote was previously accepted silently;
        // reject it like every other malformed input.
        throw new Error(`Unterminated string starting at position ${start}`);
      }
      const value = input.slice(strStart, pos);
      pos++;
      if (value.startsWith("contextnest://")) {
        tokens.push({ type: "URI", value, position: start });
      } else if (value.startsWith("#")) {
        tokens.push({ type: "TAG", value: value.slice(1), position: start });
      } else if (value.startsWith("pack:")) {
        tokens.push({ type: "PACK", value: value.slice(5), position: start });
      } else {
        tokens.push({ type: "URI", value, position: start });
      }
      continue;
    }
    // "word:value" filter tokens.
    const wordMatch = input.slice(pos).match(/^([a-zA-Z_][a-zA-Z0-9_]*)/);
    if (wordMatch) {
      const word = wordMatch[1];
      const afterWord = pos + word.length;
      if (afterWord < input.length && input[afterWord] === ":") {
        pos = afterWord + 1;
        const valueStart = pos;
        while (pos < input.length && /[a-zA-Z0-9_-]/.test(input[pos])) pos++;
        const filterValue = input.slice(valueStart, pos);
        switch (word) {
          case "type":
            tokens.push({ type: "TYPE_FILTER", value: filterValue, position: start });
            break;
          case "status":
            tokens.push({ type: "STATUS_FILTER", value: filterValue, position: start });
            break;
          case "transport":
            tokens.push({ type: "TRANSPORT_FILTER", value: filterValue, position: start });
            break;
          case "server":
            tokens.push({ type: "SERVER_FILTER", value: filterValue, position: start });
            break;
          case "pack": {
            // Pack names may also contain "."; rescan the value with the
            // wider character class (braces keep the declarations case-local).
            const packStart = pos - filterValue.length;
            pos = packStart;
            while (pos < input.length && /[a-zA-Z0-9_.-]/.test(input[pos])) pos++;
            tokens.push({
              type: "PACK",
              value: input.slice(packStart, pos),
              position: start
            });
            break;
          }
          default:
            throw new Error(`Unknown filter type "${word}" at position ${start}`);
        }
        continue;
      }
      throw new Error(`Unexpected token "${word}" at position ${start}`);
    }
    throw new Error(`Unexpected character "${ch}" at position ${pos}`);
  }
  tokens.push({ type: "EOF", value: "", position: pos });
  return tokens;
}
|
|
156
|
+
// Maps atom token types to the AST node type each one produces.
var ATOM_NODE_TYPES = {
  TAG: "tag",
  URI: "uri",
  PACK: "pack",
  TYPE_FILTER: "typeFilter",
  STATUS_FILTER: "statusFilter",
  TRANSPORT_FILTER: "transportFilter",
  SERVER_FILTER: "serverFilter"
};
// Token types that can begin an atom (string keys keep insertion order).
var ATOM_TYPES = Object.keys(ATOM_NODE_TYPES);
/**
 * Recursive-descent parser over a selector token stream.
 *
 * Grammar, lowest precedence first:
 *   or_expr  → not_expr ("|" not_expr)*
 *   not_expr → and_expr ("-" and_expr)*
 *   and_expr → atom (("+" | implicit adjacency) atom)*
 *   atom     → TAG | URI | PACK | *_FILTER | "(" or_expr ")"
 */
var SelectorParser = class {
  tokens;
  pos = 0;
  constructor(tokens) {
    this.tokens = tokens;
  }
  // Current token, not consumed.
  peek() {
    return this.tokens[this.pos];
  }
  // Consumes and returns the current token.
  advance() {
    return this.tokens[this.pos++];
  }
  // Consumes the current token if it matches `type`, otherwise throws.
  expect(type) {
    const tok = this.peek();
    if (tok.type === type) return this.advance();
    throw new Error(
      `Expected ${type} but got ${tok.type} ("${tok.value}") at position ${tok.position}`
    );
  }
  // True when the next token can start an atom.
  isAtom() {
    const { type } = this.peek();
    return type === "LPAREN" || ATOM_TYPES.includes(type);
  }
  // Parses a full expression and requires that every token was consumed.
  parse() {
    const ast = this.parseOrExpr();
    const trailing = this.peek();
    if (trailing.type !== "EOF") {
      throw new Error(
        `Unexpected token "${trailing.value}" at position ${trailing.position}`
      );
    }
    return ast;
  }
  // or_expr → not_expr ("|" not_expr)*
  parseOrExpr() {
    let node = this.parseNotExpr();
    while (this.peek().type === "OR") {
      this.advance();
      node = { type: "or", left: node, right: this.parseNotExpr() };
    }
    return node;
  }
  // not_expr → and_expr ("-" and_expr)*
  parseNotExpr() {
    let node = this.parseAndExpr();
    while (this.peek().type === "NOT") {
      this.advance();
      node = { type: "not", left: node, right: this.parseAndExpr() };
    }
    return node;
  }
  // and_expr → atom (("+" | implicit adjacency) atom)*
  parseAndExpr() {
    let node = this.parseAtom();
    for (;;) {
      if (this.peek().type === "AND") {
        this.advance();
      } else if (!this.isAtom()) {
        break;
      }
      node = { type: "and", left: node, right: this.parseAtom() };
    }
    return node;
  }
  // atom → one of ATOM_NODE_TYPES' keys, or a parenthesised sub-expression.
  parseAtom() {
    const token = this.peek();
    if (token.type === "LPAREN") {
      this.advance();
      const inner = this.parseOrExpr();
      this.expect("RPAREN");
      return inner;
    }
    const nodeType = ATOM_NODE_TYPES[token.type];
    if (nodeType === undefined) {
      throw new Error(
        `Unexpected token "${token.value}" at position ${token.position}`
      );
    }
    this.advance();
    return { type: nodeType, value: token.value };
  }
};
|
|
276
|
+
/**
 * Parses a selector string into its AST.
 *
 * Convenience wrapper: lexes `input` with `tokenize`, then runs the
 * recursive-descent `SelectorParser` over the resulting token stream.
 *
 * @param {string} input - raw selector text
 * @returns {object} root AST node
 * @throws {Error} on any lexing or parsing failure
 */
function parseSelector(input) {
  return new SelectorParser(tokenize(input)).parse();
}
|
|
281
|
+
|
|
282
|
+
// Public surface of this shared chunk: the esbuild interop helpers used by
// sibling chunks, plus the selector-parsing entry point.
export {
  __require,
  __commonJS,
  __export,
  __toESM,
  parseSelector
};
|