@contextos/core 0.2.3 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +244 -4
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -1963,7 +1963,7 @@ var RateLimiter = class {
  async waitForSlot() {
  const result = await this.checkLimit();
  if (!result.allowed) {
- await new Promise((resolve2) => setTimeout(resolve2, result.waitTime + 100));
+ await new Promise((resolve3) => setTimeout(resolve3, result.waitTime + 100));
  return this.waitForSlot();
  }
  }
@@ -2091,7 +2091,7 @@ var GeminiClient = class {
  const waitTime = retryAfter ? parseInt(retryAfter) * 1e3 : 6e4;
  if (retryCount < 3) {
  console.warn(`Rate limited. Waiting ${waitTime}ms before retry (${retryCount + 1}/3)...`);
- await new Promise((resolve2) => setTimeout(resolve2, waitTime));
+ await new Promise((resolve3) => setTimeout(resolve3, waitTime));
  return this.request(prompt, systemPrompt, retryCount + 1);
  }
  throw new Error(`Rate limit exceeded after ${retryCount} retries. Please try again later.`);
@@ -2275,6 +2275,18 @@ Output the compressed version directly.`;
  tokensSaved: originalTokens - newTokens
  };
  }
+ /**
+ * Generate code or content from a prompt
+ * Used by AIGenerator for code generation
+ */
+ async generate(prompt) {
+ const response = await this.request(prompt);
+ return {
+ text: response,
+ tokensUsed: Math.ceil(response.length / 4)
+ // Rough estimate
+ };
+ }
  };
  function createGeminiClient() {
  const apiKey = process.env.GEMINI_API_KEY;
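
The new `generate()` helper wraps the existing `request()` call and reports a rough token count (string length divided by 4). A minimal usage sketch, assuming `createGeminiClient` (the factory visible in the hunk context above) is exported from the package entry point and `GEMINI_API_KEY` is set:

```js
// Sketch only: createGeminiClient() reads GEMINI_API_KEY from the environment;
// whether it is re-exported from "@contextos/core" is assumed here.
import { createGeminiClient } from "@contextos/core";

const gemini = createGeminiClient();
const { text, tokensUsed } = await gemini.generate("Summarize this project's README in three bullet points.");

console.log(text);
console.log(`~${tokensUsed} tokens (estimated as Math.ceil(text.length / 4))`);
```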
@@ -3066,7 +3078,7 @@ var OpenAIAdapter = class {
  const waitTime = retryAfter ? parseInt(retryAfter) * 1e3 : 6e4;
  if (retryCount < 3) {
  console.warn(`OpenAI rate limited. Waiting ${waitTime}ms before retry (${retryCount + 1}/3)...`);
- await new Promise((resolve2) => setTimeout(resolve2, waitTime));
+ await new Promise((resolve3) => setTimeout(resolve3, waitTime));
  return this.complete(request, retryCount + 1);
  }
  return {
@@ -3166,7 +3178,7 @@ var AnthropicAdapter = class {
  const waitTime = retryAfter ? parseInt(retryAfter) * 1e3 : 6e4;
  if (retryCount < 3) {
  console.warn(`Anthropic rate limited. Waiting ${waitTime}ms before retry (${retryCount + 1}/3)...`);
- await new Promise((resolve2) => setTimeout(resolve2, waitTime));
+ await new Promise((resolve3) => setTimeout(resolve3, waitTime));
  return this.complete(request, retryCount + 1);
  }
  return {
@@ -7578,7 +7590,234 @@ function createLogger(config) {
  function setGlobalLogger(logger) {
  globalLogger = logger;
  }
+
+ // src/generator/index.ts
+ import { existsSync as existsSync14, mkdirSync as mkdirSync9, writeFileSync as writeFileSync9, readFileSync as readFileSync13 } from "fs";
+ import { dirname as dirname4, join as join13, resolve as resolve2, normalize as normalize3 } from "path";
+ var AIGenerator = class {
+ config = null;
+ gemini = null;
+ openai = null;
+ rootDir = process.cwd();
+ constructor(projectDir) {
+ this.rootDir = projectDir || process.cwd();
+ }
+ /**
+ * Initialize the generator
+ */
+ async initialize() {
+ try {
+ this.config = loadConfig(this.rootDir);
+ this.rootDir = this.config.rootDir;
+ } catch {
+ }
+ if (isGeminiAvailable()) {
+ this.gemini = createGeminiClient();
+ }
+ if (isOpenAIAvailable()) {
+ this.openai = createOpenAIAdapter();
+ }
+ if (!this.gemini && !this.openai) {
+ throw new Error("No AI API key found. Set GEMINI_API_KEY or OPENAI_API_KEY environment variable.");
+ }
+ }
+ /**
+ * Generate code from a prompt
+ */
+ async generate(prompt, options = {}) {
+ const model = options.model || "auto";
+ const context = await this.buildContext();
+ const fullPrompt = this.createPrompt(prompt, context);
+ let response;
+ let tokensUsed = 0;
+ try {
+ if (model === "gemini" || model === "auto" && this.gemini) {
+ if (!this.gemini) throw new Error("Gemini not available");
+ const result = await this.gemini.generate(fullPrompt);
+ response = result.text;
+ tokensUsed = result.tokensUsed;
+ } else if (model === "openai" || model === "auto" && this.openai) {
+ if (!this.openai) throw new Error("OpenAI not available");
+ const result = await this.openai.complete({
+ systemPrompt: "You are an expert software developer. Generate clean, production-ready code.",
+ userMessage: fullPrompt,
+ maxTokens: 8e3
+ });
+ response = result.content;
+ tokensUsed = result.tokensUsed.total;
+ } else {
+ throw new Error("No AI model available");
+ }
+ } catch (error) {
+ return {
+ success: false,
+ files: [],
+ tokensUsed: 0,
+ error: error instanceof Error ? error.message : String(error)
+ };
+ }
+ const files = this.parseResponse(response);
+ const maxFiles = options.maxFiles || 20;
+ if (files.length > maxFiles) {
+ return {
+ success: false,
+ files: [],
+ tokensUsed,
+ error: `Too many files generated (${files.length}). Max: ${maxFiles}`
+ };
+ }
+ if (options.dryRun) {
+ return {
+ success: true,
+ files,
+ tokensUsed
+ };
+ }
+ const writtenFiles = await this.writeFiles(files, options);
+ return {
+ success: true,
+ files: writtenFiles,
+ tokensUsed
+ };
+ }
+ /**
+ * Build context from project files
+ */
+ async buildContext() {
+ const parts = [];
+ const prdPaths = ["prd.md", "PRD.md", "docs/prd.md", "README.md"];
+ for (const prdPath of prdPaths) {
+ const fullPath = join13(this.rootDir, prdPath);
+ if (existsSync14(fullPath)) {
+ const content = readFileSync13(fullPath, "utf-8");
+ parts.push(`## ${prdPath}
+
+ ${content}`);
+ break;
+ }
+ }
+ if (this.config) {
+ parts.push(`## Project Info
+ - Name: ${this.config.context.project.name}
+ - Language: ${this.config.context.project.language}
+ - Description: ${this.config.context.project.description || "N/A"}
+ `);
+ }
+ return parts.join("\n\n---\n\n");
+ }
+ /**
+ * Create the full AI prompt
+ */
+ createPrompt(userPrompt, context) {
+ return `# Project Context
+
+ ${context}
+
+ ---
+
+ # Task
+
+ ${userPrompt}
+
+ ---
+
+ # Instructions
+
+ Generate the necessary code files. Use this EXACT format for each file:
+
+ \`\`\`path/to/filename.ext
+ // file content here
+ \`\`\`
+
+ IMPORTANT:
+ - Put the FULL FILE PATH in the code block language tag (e.g. \`\`\`src/index.ts)
+ - Generate complete, working code - no placeholders
+ - Include all necessary imports
+ - Follow best practices for the language
+ - One file per code block
+
+ Generate the files now:`;
+ }
+ /**
+ * Parse AI response into file objects
+ */
+ parseResponse(response) {
+ const files = [];
+ const codeBlockRegex = /```([^\n`]+)\n([\s\S]*?)```/g;
+ let match;
+ while ((match = codeBlockRegex.exec(response)) !== null) {
+ const pathOrLang = match[1].trim();
+ const content = match[2];
+ if (pathOrLang.includes("/") || pathOrLang.includes("\\") || /\.\w+$/.test(pathOrLang)) {
+ const filePath = pathOrLang.replace(/\\/g, "/");
+ const ext = filePath.split(".").pop() || "";
+ const language = this.getLanguageFromExtension(ext);
+ const fullPath = join13(this.rootDir, filePath);
+ const isNew = !existsSync14(fullPath);
+ files.push({
+ path: filePath,
+ content: content.trim(),
+ isNew,
+ language
+ });
+ }
+ }
+ return files;
+ }
+ /**
+ * Get language from file extension
+ */
+ getLanguageFromExtension(ext) {
+ const map = {
+ ts: "typescript",
+ tsx: "typescript",
+ js: "javascript",
+ jsx: "javascript",
+ py: "python",
+ rs: "rust",
+ go: "go",
+ java: "java",
+ md: "markdown",
+ json: "json",
+ yaml: "yaml",
+ yml: "yaml",
+ css: "css",
+ html: "html"
+ };
+ return map[ext] || ext;
+ }
+ /**
+ * Write files to disk
+ */
+ async writeFiles(files, options) {
+ const written = [];
+ for (const file of files) {
+ const fullPath = resolve2(this.rootDir, file.path);
+ const normalizedPath = normalize3(fullPath);
+ if (!normalizedPath.startsWith(normalize3(this.rootDir))) {
+ console.warn(`Skipping file outside project root: ${file.path}`);
+ continue;
+ }
+ const dir = dirname4(fullPath);
+ if (!existsSync14(dir)) {
+ mkdirSync9(dir, { recursive: true });
+ }
+ if (!file.isNew && options.backupBeforeOverwrite) {
+ const backupPath = `${fullPath}.bak`;
+ const existingContent = readFileSync13(fullPath, "utf-8");
+ writeFileSync9(backupPath, existingContent);
+ }
+ writeFileSync9(fullPath, file.content, "utf-8");
+ written.push(file);
+ }
+ return written;
+ }
+ };
+ function createAIGenerator(projectDir) {
+ return new AIGenerator(projectDir);
+ }
  export {
+ AIGenerator,
  ASTParser,
  AnalyticsCollector,
  AnalyticsConfigSchema,
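
The generator asks the model to put the full file path in each code fence's info string, and `parseResponse` keeps only blocks whose info string looks like a path (contains a slash or a backslash, or ends in a file extension). A small illustrative sketch of that round trip, using a made-up model response:

```js
// Illustrative only: a fabricated model response in the format requested by createPrompt().
const response = [
  "```src/hello.js",
  "export const hello = (name) => `Hello, ${name}!`;",
  "```",
  "",
  "```bash", // info string has no path and no extension, so parseResponse() skips this block
  "npm run build",
  "```"
].join("\n");

// generator.parseResponse(response) would return roughly:
// [{ path: "src/hello.js", language: "javascript", isNew: true, content: "export const hello = ..." }]
// (isNew depends on whether the file already exists under the project root)
```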
@@ -7632,6 +7871,7 @@ export {
  calculateCost,
  checkHealth,
  chunkCode,
+ createAIGenerator,
  createAnthropicAdapter,
  createBlackboard,
  createContextAPI,
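
With `AIGenerator` and `createAIGenerator` now exported, an end-to-end call might look like the sketch below. It assumes `GEMINI_API_KEY` or `OPENAI_API_KEY` is set; the option names (`model`, `dryRun`, `maxFiles`, `backupBeforeOverwrite`) come from the added code above, and `dryRun` keeps the example from writing to disk.

```js
import { createAIGenerator } from "@contextos/core";

const generator = createAIGenerator(process.cwd());
await generator.initialize(); // throws if neither GEMINI_API_KEY nor OPENAI_API_KEY is set

const result = await generator.generate("Add a health-check endpoint", {
  model: "auto", // "gemini", "openai", or "auto" (the default)
  dryRun: true,  // parse the generated files but do not write them
  maxFiles: 10   // reject responses with more files than this (default 20)
});

if (result.success) {
  for (const file of result.files) {
    console.log(`${file.isNew ? "create" : "overwrite"} ${file.path} (${file.language})`);
  }
  console.log(`tokens used: ${result.tokensUsed}`);
} else {
  console.error(result.error);
}
```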
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@contextos/core",
- "version": "0.2.3",
+ "version": "0.3.0",
  "description": "Core engine for ContextOS - context management, parsing, and ranking",
  "type": "module",
  "main": "./dist/index.js",