@justram/pie 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +30 -0
- package/LICENSE +21 -0
- package/README.md +236 -0
- package/bin/pie +14 -0
- package/dist/cache/index.d.ts +4 -0
- package/dist/cache/index.js +3 -0
- package/dist/cache/warm.d.ts +3 -0
- package/dist/cache/warm.js +23 -0
- package/dist/cli/args.d.ts +30 -0
- package/dist/cli/args.js +185 -0
- package/dist/cli/attachments.d.ts +7 -0
- package/dist/cli/attachments.js +29 -0
- package/dist/cli/config.d.ts +22 -0
- package/dist/cli/config.js +20 -0
- package/dist/cli/image.d.ts +17 -0
- package/dist/cli/image.js +73 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.js +1 -0
- package/dist/cli/oauth.d.ts +14 -0
- package/dist/cli/oauth.js +178 -0
- package/dist/cli/stream.d.ts +7 -0
- package/dist/cli/stream.js +73 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +15 -0
- package/dist/core/cache/file.d.ts +4 -0
- package/dist/core/cache/file.js +44 -0
- package/dist/core/cache/key.d.ts +2 -0
- package/dist/core/cache/key.js +12 -0
- package/dist/core/cache/memory.d.ts +4 -0
- package/dist/core/cache/memory.js +33 -0
- package/dist/core/cache/types.d.ts +19 -0
- package/dist/core/cache/types.js +1 -0
- package/dist/core/errors.d.ts +39 -0
- package/dist/core/errors.js +50 -0
- package/dist/core/events.d.ts +87 -0
- package/dist/core/events.js +1 -0
- package/dist/core/extract.d.ts +4 -0
- package/dist/core/extract.js +384 -0
- package/dist/core/frontmatter.d.ts +5 -0
- package/dist/core/frontmatter.js +58 -0
- package/dist/core/helpers.d.ts +5 -0
- package/dist/core/helpers.js +80 -0
- package/dist/core/schema/normalize.d.ts +7 -0
- package/dist/core/schema/normalize.js +187 -0
- package/dist/core/setup.d.ts +13 -0
- package/dist/core/setup.js +174 -0
- package/dist/core/types.d.ts +143 -0
- package/dist/core/types.js +1 -0
- package/dist/core/validators/assert.d.ts +1 -0
- package/dist/core/validators/assert.js +18 -0
- package/dist/core/validators/command.d.ts +1 -0
- package/dist/core/validators/command.js +10 -0
- package/dist/core/validators/http.d.ts +1 -0
- package/dist/core/validators/http.js +28 -0
- package/dist/core/validators/index.d.ts +22 -0
- package/dist/core/validators/index.js +55 -0
- package/dist/core/validators/shell.d.ts +9 -0
- package/dist/core/validators/shell.js +24 -0
- package/dist/errors.d.ts +1 -0
- package/dist/errors.js +1 -0
- package/dist/events.d.ts +1 -0
- package/dist/events.js +1 -0
- package/dist/extract.d.ts +4 -0
- package/dist/extract.js +18 -0
- package/dist/index.d.ts +13 -0
- package/dist/index.js +8 -0
- package/dist/main.d.ts +9 -0
- package/dist/main.js +571 -0
- package/dist/models.d.ts +21 -0
- package/dist/models.js +21 -0
- package/dist/recipes/index.d.ts +34 -0
- package/dist/recipes/index.js +185 -0
- package/dist/runtime/node.d.ts +2 -0
- package/dist/runtime/node.js +71 -0
- package/dist/runtime/types.d.ts +32 -0
- package/dist/runtime/types.js +1 -0
- package/dist/setup.d.ts +2 -0
- package/dist/setup.js +1 -0
- package/dist/types.d.ts +1 -0
- package/dist/types.js +1 -0
- package/dist/utils/helpers.d.ts +5 -0
- package/dist/utils/helpers.js +80 -0
- package/package.json +71 -0
package/dist/models.d.ts
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Model utilities re-exported from pi-ai.
|
|
3
|
+
*
|
|
4
|
+
* Usage:
|
|
5
|
+
* ```typescript
|
|
6
|
+
* import { getModel, getModels, getProviders } from "pie";
|
|
7
|
+
*
|
|
8
|
+
* // Get specific model
|
|
9
|
+
* const model = getModel("anthropic", "claude-sonnet-4-5");
|
|
10
|
+
*
|
|
11
|
+
* // List all models for a provider
|
|
12
|
+
* const anthropicModels = getModels("anthropic");
|
|
13
|
+
*
|
|
14
|
+
* // List all providers
|
|
15
|
+
* const providers = getProviders();
|
|
16
|
+
* ```
|
|
17
|
+
*
|
|
18
|
+
* This approach avoids hardcoding model IDs which break when pi-ai updates.
|
|
19
|
+
* Users get full type safety from pi-ai's generated model types.
|
|
20
|
+
*/
|
|
21
|
+
export { getModel, getModels, getProviders } from "@mariozechner/pi-ai";
|
package/dist/models.js
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Model utilities re-exported from pi-ai.
|
|
3
|
+
*
|
|
4
|
+
* Usage:
|
|
5
|
+
* ```typescript
|
|
6
|
+
* import { getModel, getModels, getProviders } from "pie";
|
|
7
|
+
*
|
|
8
|
+
* // Get specific model
|
|
9
|
+
* const model = getModel("anthropic", "claude-sonnet-4-5");
|
|
10
|
+
*
|
|
11
|
+
* // List all models for a provider
|
|
12
|
+
* const anthropicModels = getModels("anthropic");
|
|
13
|
+
*
|
|
14
|
+
* // List all providers
|
|
15
|
+
* const providers = getProviders();
|
|
16
|
+
* ```
|
|
17
|
+
*
|
|
18
|
+
* This approach avoids hardcoding model IDs which break when pi-ai updates.
|
|
19
|
+
* Users get full type safety from pi-ai's generated model types.
|
|
20
|
+
*/
|
|
21
|
+
export { getModel, getModels, getProviders } from "@mariozechner/pi-ai";
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import type { ExtractionSetup } from "../core/setup.js";
|
|
2
|
+
import type { ExtractOptions } from "../core/types.js";
|
|
3
|
+
/** Raw frontmatter parsed from a RECIPE.md file; unknown extra keys are preserved. */
export interface RecipeFrontmatter {
    name?: string;
    description?: string;
    [key: string]: unknown;
}
/** A discovered recipe: its identity plus where it was found. */
export interface Recipe {
    name: string;
    description: string;
    /** Absolute path of the RECIPE.md file. */
    filePath: string;
    /** Directory containing the RECIPE.md; the setup file is resolved against it. */
    baseDir: string;
    /** Which search location produced this recipe ("pie-user", "pie-project", or "custom"). */
    source: string;
}
/** A non-fatal problem encountered while loading a recipe. */
export interface RecipeWarning {
    recipePath: string;
    message: string;
}
export interface LoadRecipesOptions {
    /** Base directory for project-local discovery; defaults to process.cwd(). */
    cwd?: string;
    /** Additional directories to scan (a leading "~" is expanded to the home directory). */
    customDirectories?: string[];
}
export interface LoadRecipesResult {
    recipes: Recipe[];
    warnings: RecipeWarning[];
}
export interface LoadRecipeSetupOptions<T> {
    /** Setup file name relative to the recipe directory; defaults to "setup.md". */
    setupFile?: string;
    /** Template variables forwarded to the setup loader. */
    vars?: Record<string, unknown>;
    /** Extraction options that override the setup file (prompt and schema excluded). */
    overrides?: Partial<Omit<ExtractOptions<T>, "prompt" | "schema">>;
}
/** Discover all recipes; later sources override earlier ones by name (with a warning). */
export declare function loadRecipes(options?: LoadRecipesOptions): LoadRecipesResult;
/** Find a recipe by exact name, or undefined when no recipe matches. */
export declare function resolveRecipe(name: string, options?: LoadRecipesOptions): Recipe | undefined;
/** Load the extraction setup associated with a recipe. */
export declare function loadRecipeSetup<T>(recipe: Recipe, options?: LoadRecipeSetupOptions<T>): ExtractionSetup<T>;
|
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
import { existsSync, readdirSync, readFileSync, statSync } from "node:fs";
|
|
2
|
+
import { homedir } from "node:os";
|
|
3
|
+
import { basename, dirname, join, resolve } from "node:path";
|
|
4
|
+
import { parseFrontmatter } from "../core/frontmatter.js";
|
|
5
|
+
import { loadExtractionSetup } from "../core/setup.js";
|
|
6
|
+
// Canonical file name that marks a directory as containing a recipe.
const RECIPE_FILE_NAME = "RECIPE.md";
// Upper bound on recipe name length, enforced by validateName().
const MAX_NAME_LENGTH = 64;
// Upper bound on frontmatter description length, enforced by validateDescription().
const MAX_DESCRIPTION_LENGTH = 1024;
// Frontmatter keys that are recognized; any other key produces a warning.
const ALLOWED_FRONTMATTER_FIELDS = new Set(["name", "description", "metadata"]);
|
|
10
|
+
/**
 * Discover recipes from the user directory (~/.pie/recipes), the project
 * directory (<cwd>/.pie/recipes), and any custom directories, in that order.
 * When two sources define the same name, the later one wins and the
 * override is reported as a warning.
 */
export function loadRecipes(options = {}) {
    const workingDir = options.cwd ?? process.cwd();
    const extraDirs = options.customDirectories ?? [];
    const searchOrder = [
        { dir: join(homedir(), ".pie", "recipes"), source: "pie-user" },
        { dir: resolve(workingDir, ".pie", "recipes"), source: "pie-project" },
    ];
    for (const dir of extraDirs) {
        searchOrder.push({ dir: expandHome(dir), source: "custom" });
    }
    const byName = new Map();
    const warnings = [];
    for (const { dir, source } of searchOrder) {
        const loaded = loadRecipesFromDir(dir, source);
        warnings.push(...loaded.warnings);
        for (const recipe of loaded.recipes) {
            const shadowed = byName.get(recipe.name);
            if (shadowed) {
                warnings.push({
                    recipePath: recipe.filePath,
                    message: `Recipe "${recipe.name}" overrides ${shadowed.filePath}.`,
                });
            }
            byName.set(recipe.name, recipe);
        }
    }
    return {
        recipes: [...byName.values()],
        warnings,
    };
}
|
|
39
|
+
/** Look up a single recipe by exact name; undefined when not found. */
export function resolveRecipe(name, options = {}) {
    for (const recipe of loadRecipes(options).recipes) {
        if (recipe.name === name) {
            return recipe;
        }
    }
    return undefined;
}
|
|
43
|
+
/**
 * Load the extraction setup for a recipe. The setup file (default
 * "setup.md") is resolved relative to the recipe's base directory, and
 * vars/overrides are forwarded to the setup loader.
 */
export function loadRecipeSetup(recipe, options = {}) {
    const fileName = options.setupFile ?? "setup.md";
    return loadExtractionSetup(resolve(recipe.baseDir, fileName), {
        vars: options.vars,
        overrides: options.overrides,
    });
}
|
|
52
|
+
/**
 * Recursively scan a directory for RECIPE.md files. Hidden entries and
 * node_modules are skipped. Symlinks are resolved via stat so links to
 * files/directories are followed; broken links are ignored. Directory
 * read errors abort the scan silently (best-effort discovery).
 */
function loadRecipesFromDir(dir, source) {
    const recipes = [];
    const warnings = [];
    if (!existsSync(dir)) {
        return { recipes, warnings };
    }
    try {
        for (const dirent of readdirSync(dir, { withFileTypes: true })) {
            if (dirent.name.startsWith(".") || dirent.name === "node_modules") {
                continue;
            }
            const childPath = join(dir, dirent.name);
            let dirFlag = dirent.isDirectory();
            let fileFlag = dirent.isFile();
            if (dirent.isSymbolicLink()) {
                try {
                    const resolved = statSync(childPath);
                    dirFlag = resolved.isDirectory();
                    fileFlag = resolved.isFile();
                }
                catch {
                    // broken symlink — ignore the entry entirely
                    continue;
                }
            }
            if (dirFlag) {
                const nested = loadRecipesFromDir(childPath, source);
                recipes.push(...nested.recipes);
                warnings.push(...nested.warnings);
            }
            else if (fileFlag && dirent.name === RECIPE_FILE_NAME) {
                const loaded = loadRecipeFromFile(childPath, source);
                if (loaded.recipe) {
                    recipes.push(loaded.recipe);
                }
                warnings.push(...loaded.warnings);
            }
        }
    }
    catch {
        // best-effort: unreadable directories are skipped silently
    }
    return { recipes, warnings };
}
|
|
98
|
+
/**
 * Parse one RECIPE.md file. Validation problems are collected as
 * warnings; a recipe is only produced when a non-empty description is
 * present. The name falls back to the parent directory name when the
 * frontmatter does not provide one. Parse failures yield a null recipe
 * plus a warning carrying the error message.
 */
function loadRecipeFromFile(filePath, source) {
    const warnings = [];
    const warn = (message) => warnings.push({ recipePath: filePath, message });
    try {
        const { frontmatter } = parseFrontmatter(readFileSync(filePath, "utf-8"));
        const recipeDir = dirname(filePath);
        const dirName = basename(recipeDir);
        validateFrontmatterFields(Object.keys(frontmatter)).forEach(warn);
        validateDescription(frontmatter.description).forEach(warn);
        const name = frontmatter.name ?? dirName;
        validateName(name, dirName).forEach(warn);
        if (!frontmatter.description || frontmatter.description.trim() === "") {
            return { recipe: null, warnings };
        }
        return {
            recipe: {
                name,
                description: frontmatter.description,
                filePath,
                baseDir: recipeDir,
                source,
            },
            warnings,
        };
    }
    catch (error) {
        warn(error instanceof Error ? error.message : "failed to parse recipe file");
        return { recipe: null, warnings };
    }
}
|
|
139
|
+
/** Return one warning message per frontmatter key outside the allowed set. */
function validateFrontmatterFields(keys) {
    return keys
        .filter((key) => !ALLOWED_FRONTMATTER_FIELDS.has(key))
        .map((key) => `unknown frontmatter field "${key}"`);
}
|
|
148
|
+
/**
 * Validate the frontmatter description: it must be a non-empty string no
 * longer than MAX_DESCRIPTION_LENGTH characters.
 *
 * @param description - frontmatter value; may be absent or non-string
 *   since frontmatter keys are untyped.
 * @returns human-readable problems (empty when valid).
 */
function validateDescription(description) {
    const errors = [];
    if (typeof description !== "string" || description.trim() === "") {
        // The typeof guard also covers non-string truthy values (e.g. a
        // YAML number); previously those threw a TypeError on .length/.trim
        // instead of producing a validation warning.
        errors.push("description is required");
    }
    else if (description.length > MAX_DESCRIPTION_LENGTH) {
        errors.push(`description exceeds ${MAX_DESCRIPTION_LENGTH} characters (${description.length})`);
    }
    return errors;
}
|
|
158
|
+
/**
 * Validate a recipe name: it must equal its parent directory name, be at
 * most MAX_NAME_LENGTH characters, and be lowercase kebab-case with no
 * leading, trailing, or doubled hyphens. All failed checks are reported.
 */
function validateName(name, parentDirName) {
    const checks = [
        [name !== parentDirName, `name "${name}" does not match parent directory "${parentDirName}"`],
        [name.length > MAX_NAME_LENGTH, `name exceeds ${MAX_NAME_LENGTH} characters (${name.length})`],
        [!/^[a-z0-9-]+$/.test(name), "name contains invalid characters (must be lowercase a-z, 0-9, hyphens only)"],
        [name.startsWith("-") || name.endsWith("-"), "name must not start or end with a hyphen"],
        [name.includes("--"), "name must not contain consecutive hyphens"],
    ];
    return checks.filter(([failed]) => failed).map(([, message]) => message);
}
|
|
177
|
+
/** Expand a leading "~" or "~/" to the user's home directory; other paths pass through. */
function expandHome(path) {
    if (path === "~") {
        return homedir();
    }
    return path.startsWith("~/") ? join(homedir(), path.slice(2)) : path;
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
2
|
+
import { readFile, rm, stat, writeFile, mkdir } from "node:fs/promises";
|
|
3
|
+
import { spawn } from "node:child_process";
|
|
4
|
+
/**
 * Create a Runtime implementation backed by Node.js built-ins
 * (fs/promises, crypto, child_process). All methods are async and match
 * the Runtime interface in runtime/types.
 */
export function createNodeRuntime() {
    return {
        now: () => Date.now(),
        env: {
            // Reads from the live process environment.
            get: (name) => process.env[name],
        },
        fs: {
            exists: async (path) => {
                try {
                    await stat(path);
                    return true;
                }
                catch {
                    return false;
                }
            },
            readFile: async (path) => {
                const buf = await readFile(path);
                return new Uint8Array(buf);
            },
            readTextFile: async (path) => {
                return await readFile(path, { encoding: "utf8" });
            },
            writeFile: async (path, data) => {
                await writeFile(path, data);
            },
            writeTextFile: async (path, text) => {
                await writeFile(path, text, { encoding: "utf8" });
            },
            mkdir: async (path, options) => {
                await mkdir(path, { recursive: options?.recursive ?? false });
            },
            remove: async (path, options) => {
                // force: true — removing a missing path is not an error.
                await rm(path, { recursive: options?.recursive ?? false, force: true });
            },
        },
        crypto: {
            sha256Hex: async (text) => {
                return createHash("sha256").update(text).digest("hex");
            },
        },
        process: {
            // Runs `command` through `sh -c`, capturing stdout/stderr as UTF-8.
            // The optional AbortSignal kills the child and rejects the promise.
            runShell: async (command, options) => {
                const child = spawn("sh", ["-c", command], {
                    stdio: ["pipe", "pipe", "pipe"],
                    signal: options?.signal,
                });
                const stdoutChunks = [];
                const stderrChunks = [];
                child.stdout.on("data", (chunk) => stdoutChunks.push(chunk));
                child.stderr.on("data", (chunk) => stderrChunks.push(chunk));
                // Swallow stdin stream errors (EPIPE): the child may exit
                // before consuming its input, and an unhandled 'error' event
                // on the stdin stream would crash the process.
                child.stdin.on("error", () => { });
                if (options?.stdin !== undefined) {
                    child.stdin.write(options.stdin);
                }
                child.stdin.end();
                const code = await new Promise((resolve, reject) => {
                    child.on("error", reject);
                    // close fires after stdio is flushed; null code (signal kill) maps to 0
                    child.on("close", (c) => resolve(c ?? 0));
                });
                return {
                    code,
                    stdout: Buffer.concat(stdoutChunks).toString("utf8"),
                    stderr: Buffer.concat(stderrChunks).toString("utf8"),
                };
            },
        },
    };
}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
/**
 * Platform abstraction consumed by the library so core logic stays
 * runtime-agnostic; see createNodeRuntime for the Node.js implementation.
 */
export interface Runtime {
    /** Current time in milliseconds since the epoch. */
    now(): number;
    env: {
        /** Read an environment variable; undefined when unset. */
        get(name: string): string | undefined;
    };
    fs: {
        exists(path: string): Promise<boolean>;
        readFile(path: string): Promise<Uint8Array>;
        readTextFile(path: string): Promise<string>;
        writeFile(path: string, data: Uint8Array): Promise<void>;
        writeTextFile(path: string, text: string): Promise<void>;
        mkdir(path: string, options?: {
            recursive?: boolean;
        }): Promise<void>;
        remove(path: string, options?: {
            recursive?: boolean;
        }): Promise<void>;
    };
    crypto: {
        /** Hex-encoded SHA-256 digest of the given text. */
        sha256Hex(text: string): Promise<string>;
    };
    process: {
        /** Run a shell command, returning its exit code and captured output. */
        runShell(command: string, options?: {
            stdin?: string;
            signal?: AbortSignal;
        }): Promise<{
            code: number;
            stdout: string;
            stderr: string;
        }>;
    };
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
package/dist/setup.d.ts
ADDED
package/dist/setup.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { loadExtractionSetup } from "./core/setup.js";
|
package/dist/types.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export type * from "./core/types.js";
|
package/dist/types.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import type { AssistantMessage, ImageContent, Message, ToolCall } from "@mariozechner/pi-ai";
|
|
2
|
+
/** Normalize string-or-messages input into a Message[]; attachments apply to string input only. */
export declare function buildMessages(input: string | Message[], attachments?: ImageContent[]): Message[];
/** First toolCall content block matching toolName, or undefined. */
export declare function findToolCall(message: AssistantMessage, toolName: string): ToolCall | undefined;
/** All text content of the message, newline-joined and trimmed. */
export declare function getTextContent(message: AssistantMessage): string;
/** Best-effort JSON extraction from the message's text; null when nothing parses. */
export declare function parseJsonFromText(message: AssistantMessage): unknown | null;
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
/**
 * Normalize extraction input into a Message array. A string becomes a
 * single user message (with image attachments appended when present);
 * an existing message array is returned unchanged.
 */
export function buildMessages(input, attachments) {
    if (typeof input !== "string") {
        return input;
    }
    const hasAttachments = attachments !== undefined && attachments.length > 0;
    return [
        {
            role: "user",
            content: hasAttachments ? [{ type: "text", text: input }, ...attachments] : input,
            timestamp: Date.now(),
        },
    ];
}
|
|
15
|
+
/** Return the first toolCall content block with the given tool name, if any. */
export function findToolCall(message, toolName) {
    return message.content.find((part) => part.type === "toolCall" && part.name === toolName);
}
|
|
23
|
+
/** Concatenate all text parts of the message, newline-joined and trimmed. */
export function getTextContent(message) {
    const parts = [];
    for (const part of message.content) {
        if (part.type === "text") {
            parts.push(part.text);
        }
    }
    return parts.join("\n").trim();
}
|
|
30
|
+
/**
 * Best-effort JSON extraction from an assistant message's text parts.
 * Candidates are tried in order (fenced block, full text, outermost
 * object/array slice); the first that parses is returned, else null.
 */
export function parseJsonFromText(message) {
    const text = getTextContent(message);
    if (!text) {
        return null;
    }
    for (const candidate of collectJsonCandidates(text)) {
        const value = tryParseJson(candidate);
        if (value !== null) {
            return value;
        }
    }
    return null;
}
|
|
44
|
+
/**
 * Build the ordered list of JSON candidate strings for a text blob:
 * the fenced code block (when present), then the full text, then the
 * outermost {...} and [...] slices.
 */
function collectJsonCandidates(text) {
    const fenced = extractFencedJson(text);
    const candidates = fenced ? [fenced, text] : [text];
    for (const [open, close] of [["{", "}"], ["[", "]"]]) {
        const slice = sliceBetween(text, open, close);
        if (slice) {
            candidates.push(slice);
        }
    }
    return candidates;
}
|
|
61
|
+
/** Contents of the first ``` fenced block (optionally tagged json), or null. */
function extractFencedJson(text) {
    const fence = /```(?:json)?\s*([\s\S]*?)\s*```/i.exec(text);
    return fence === null ? null : fence[1].trim();
}
|
|
65
|
+
/** Slice from the first `open` to the last `close` (inclusive); null when absent or inverted. */
function sliceBetween(text, open, close) {
    const first = text.indexOf(open);
    if (first === -1) {
        return null;
    }
    const last = text.lastIndexOf(close);
    if (last <= first) {
        return null;
    }
    return text.slice(first, last + 1).trim();
}
|
|
73
|
+
/** JSON.parse that yields null instead of throwing on invalid input. */
function tryParseJson(text) {
    let parsed = null;
    try {
        parsed = JSON.parse(text);
    }
    catch {
        // invalid JSON — treated as "no candidate"
    }
    return parsed;
}
|
package/package.json
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@justram/pie",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Structured extraction library and CLI built on @mariozechner/pi-ai.",
|
|
5
|
+
"license": "MIT",
|
|
6
|
+
"keywords": [
|
|
7
|
+
"llm",
|
|
8
|
+
"structured-data",
|
|
9
|
+
"extraction",
|
|
10
|
+
"cli",
|
|
11
|
+
"schema",
|
|
12
|
+
"pi-ai"
|
|
13
|
+
],
|
|
14
|
+
"type": "module",
|
|
15
|
+
"main": "./dist/index.js",
|
|
16
|
+
"types": "./dist/index.d.ts",
|
|
17
|
+
"exports": {
|
|
18
|
+
".": {
|
|
19
|
+
"types": "./dist/index.d.ts",
|
|
20
|
+
"default": "./dist/index.js"
|
|
21
|
+
},
|
|
22
|
+
"./cache": {
|
|
23
|
+
"types": "./dist/cache/index.d.ts",
|
|
24
|
+
"default": "./dist/cache/index.js"
|
|
25
|
+
}
|
|
26
|
+
},
|
|
27
|
+
"bin": {
|
|
28
|
+
"pie": "./bin/pie"
|
|
29
|
+
},
|
|
30
|
+
"files": [
|
|
31
|
+
"bin",
|
|
32
|
+
"dist",
|
|
33
|
+
"README.md",
|
|
34
|
+
"LICENSE",
|
|
35
|
+
"CHANGELOG.md"
|
|
36
|
+
],
|
|
37
|
+
"publishConfig": {
|
|
38
|
+
"access": "public"
|
|
39
|
+
},
|
|
40
|
+
"engines": {
|
|
41
|
+
"node": ">=20.0.0"
|
|
42
|
+
},
|
|
43
|
+
"scripts": {
|
|
44
|
+
"build": "tsc -p tsconfig.build.json",
|
|
45
|
+
"typecheck": "tsc -p tsconfig.base.json --noEmit",
|
|
46
|
+
"lint": "biome check .",
|
|
47
|
+
"lint:imports": "eslint --fix \"{src,test,spec,examples}/**/*.ts\"",
|
|
48
|
+
"check": "biome check --write . && npm run typecheck",
|
|
49
|
+
"test": "vitest run",
|
|
50
|
+
"test:watch": "vitest",
|
|
51
|
+
"test:e2e": "vitest run --config vitest.config.e2e.ts",
|
|
52
|
+
"prepare": "husky"
|
|
53
|
+
},
|
|
54
|
+
"dependencies": {
|
|
55
|
+
"@mariozechner/pi-ai": "^0.49.2",
|
|
56
|
+
"@silvia-odwyer/photon-node": "^0.3.4",
|
|
57
|
+
"@sinclair/typebox": "^0.34.46",
|
|
58
|
+
"minijinja-js": "^2.14.0"
|
|
59
|
+
},
|
|
60
|
+
"devDependencies": {
|
|
61
|
+
"@biomejs/biome": "^2.3.11",
|
|
62
|
+
"@types/node": "^22.10.2",
|
|
63
|
+
"@typescript-eslint/parser": "^8.46.0",
|
|
64
|
+
"eslint": "^9.36.0",
|
|
65
|
+
"eslint-plugin-simple-import-sort": "^12.1.1",
|
|
66
|
+
"fast-check": "^4.5.3",
|
|
67
|
+
"husky": "^9.1.7",
|
|
68
|
+
"typescript": "^5.9.3",
|
|
69
|
+
"vitest": "^4.0.16"
|
|
70
|
+
}
|
|
71
|
+
}
|