dirac-lang 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +8 -0
- package/COMMUNITY.md +465 -0
- package/LIBRARIES.md +172 -0
- package/NAMESPACES.md +366 -0
- package/PROMOTION.md +257 -0
- package/QUICKSTART-LIBRARY.md +93 -0
- package/README.md +257 -0
- package/config.yml +6 -0
- package/config.yml.openai +4 -0
- package/dirac-http/examples/demo.di +9 -0
- package/dirac-http/lib/index.di +12 -0
- package/dist/chunk-NDIRTD3D.js +217 -0
- package/dist/chunk-S625X7ME.js +1071 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.js +261 -0
- package/dist/index.d.ts +144 -0
- package/dist/index.js +22 -0
- package/dist/session-4QG7OERD.js +42 -0
- package/examples/add-demo.di +74 -0
- package/examples/add.bk +11 -0
- package/examples/advanced-math-demo.di +53 -0
- package/examples/calculator.di +32 -0
- package/examples/comprehensive.bk +29 -0
- package/examples/defvar-variable-demo.di +18 -0
- package/examples/direct-call.di +17 -0
- package/examples/disk-analysis.di +16 -0
- package/examples/executable-hello.di +7 -0
- package/examples/execute-demo.di +38 -0
- package/examples/file-manager.di +77 -0
- package/examples/file-stats.di +18 -0
- package/examples/hello.bk +1 -0
- package/examples/hello.di +5 -0
- package/examples/import-demo.di +31 -0
- package/examples/inline-test.bk +7 -0
- package/examples/lib/advanced-math.di +81 -0
- package/examples/lib/fileops.di +26 -0
- package/examples/lib/math.di +25 -0
- package/examples/lib/mongodb.di +96 -0
- package/examples/llm-agent.di +32 -0
- package/examples/llm-basic.di +12 -0
- package/examples/llm-command-no-exec.di +13 -0
- package/examples/llm-command.di +13 -0
- package/examples/llm-complex.di +141 -0
- package/examples/llm-recursive.di +31 -0
- package/examples/llm-reflection-test.di +19 -0
- package/examples/llm-subs.di +132 -0
- package/examples/llm-use-subs.di +6 -0
- package/examples/loop.di +12 -0
- package/examples/math-test.di +22 -0
- package/examples/mongodb-count-events.di +8 -0
- package/examples/mongodb-import-demo.di +25 -0
- package/examples/mongodb-simple-test.di +18 -0
- package/examples/nl-agent.di +47 -0
- package/examples/parameters-demo.di +68 -0
- package/examples/params-test.di +10 -0
- package/examples/recipe-chain.di +38 -0
- package/examples/recursive-llm.di +44 -0
- package/examples/sample-library/README.md +152 -0
- package/examples/sample-library/examples/demo.di +34 -0
- package/examples/sample-library/lib/index.di +65 -0
- package/examples/sample-library/package.json +31 -0
- package/examples/seamless.di +45 -0
- package/examples/shell-test.bk +10 -0
- package/examples/simple-import.di +13 -0
- package/examples/simple-recursive.di +26 -0
- package/examples/story-builder.di +45 -0
- package/examples/subroutine.di +23 -0
- package/examples/system-llm.di +21 -0
- package/examples/system-simple.di +3 -0
- package/examples/system-test.di +13 -0
- package/examples/task-assistant.di +27 -0
- package/examples/test-parameters.di +50 -0
- package/examples/two-styles.di +28 -0
- package/examples/var-debug.di +6 -0
- package/examples/var-inline.di +4 -0
- package/examples/var-test2.di +6 -0
- package/examples/variable-simple.di +16 -0
- package/examples/variable-test.di +22 -0
- package/filePath +1 -0
- package/greeting.txt +1 -0
- package/package.json +41 -0
- package/src/cli.ts +118 -0
- package/src/index.ts +33 -0
- package/src/llm/ollama.ts +58 -0
- package/src/runtime/braket-parser.ts +234 -0
- package/src/runtime/interpreter.ts +135 -0
- package/src/runtime/parser.ts +151 -0
- package/src/runtime/session.ts +228 -0
- package/src/tags/assign.ts +37 -0
- package/src/tags/call.ts +156 -0
- package/src/tags/defvar.ts +56 -0
- package/src/tags/eval.ts +68 -0
- package/src/tags/execute.ts +52 -0
- package/src/tags/expr.ts +128 -0
- package/src/tags/if.ts +58 -0
- package/src/tags/import.ts +66 -0
- package/src/tags/index.ts +37 -0
- package/src/tags/llm.ts +207 -0
- package/src/tags/loop.ts +43 -0
- package/src/tags/mongodb.ts +70 -0
- package/src/tags/output.ts +23 -0
- package/src/tags/parameters.ts +79 -0
- package/src/tags/require_module.ts +19 -0
- package/src/tags/subroutine.ts +52 -0
- package/src/tags/system.ts +70 -0
- package/src/tags/variable.ts +25 -0
- package/src/types/index.ts +101 -0
- package/src/utils/llm-adapter.ts +113 -0
- package/tools/create-library.sh +175 -0
- package/tsconfig.json +19 -0
package/README.md
ADDED
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
# Dirac: The Agentic Recursive Language for LLM-Augmented Computing
|
|
2
|
+
|
|
3
|
+
## What is Dirac?
|
|
4
|
+
|
|
5
|
+
Dirac is a **declarative execution language** specifically designed for the AI era, where large language models (LLMs) are not just tools, but active participants in code execution. It's named after physicist Paul Dirac and his bra-ket notation, reflecting its dual nature: bridging human-readable declarations with machine execution.
|
|
6
|
+
|
|
7
|
+
## The Recursive LLM Paradigm
|
|
8
|
+
|
|
9
|
+
Traditional programming languages separate code from AI. You write code, then separately call an LLM API. Dirac **eliminates this boundary**:
|
|
10
|
+
|
|
11
|
+
```xml
|
|
12
|
+
<llm execute="true">
|
|
13
|
+
Create a Dirac program that lists all .txt files,
|
|
14
|
+
reads the first one, and summarizes it.
|
|
15
|
+
</llm>
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
The LLM doesn't just respond—it **generates Dirac code that immediately executes**. The generated code can itself call LLMs, creating a **recursive chain** where AI and execution seamlessly interweave.
|
|
19
|
+
|
|
20
|
+
## Agentic by Design
|
|
21
|
+
|
|
22
|
+
Dirac treats LLMs as **autonomous agents** that can:
|
|
23
|
+
|
|
24
|
+
- **Generate executable code** on-the-fly
|
|
25
|
+
- **Make decisions** based on runtime data
|
|
26
|
+
- **Invoke system commands** and process their output
|
|
27
|
+
- **Call themselves recursively** to break down complex tasks
|
|
28
|
+
- **Import and compose libraries** for modular problem-solving
|
|
29
|
+
|
|
30
|
+
Example of an agentic workflow:
|
|
31
|
+
|
|
32
|
+
```xml
|
|
33
|
+
<llm output="fileList">
|
|
34
|
+
<system>ls -la</system>
|
|
35
|
+
Analyze these files and create Dirac code to process them.
|
|
36
|
+
</llm>
|
|
37
|
+
|
|
38
|
+
<execute source="fileList"/> <!-- LLM-generated code runs here -->
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
## Neural-Symbolic AI: Bridging Symbolic Reasoning and Neural Networks
|
|
43
|
+
|
|
44
|
+
Dirac is not just agentic—it’s also a natural fit for **neural-symbolic AI**. Its bra/ket-inspired knowledge representation allows you to express and connect symbolic logic and neural computation in a unified language.
|
|
45
|
+
|
|
46
|
+
**Example: Aristotle’s Syllogism**
|
|
47
|
+
|
|
48
|
+
- All humans are mortal.
|
|
49
|
+
- Socrates is a human.
|
|
50
|
+
- Therefore, Socrates is mortal.
|
|
51
|
+
|
|
52
|
+
In Dirac’s bra/ket notation, this can be represented as:
|
|
53
|
+
- `|mortal⟩⟨human|` (all humans are mortal)
|
|
54
|
+
- `|human⟩⟨Socrates|` (Socrates is a human)
|
|
55
|
+
|
|
56
|
+
When you ask `|Socrates⟩`, chaining these together yields `|mortal⟩`.
|
|
57
|
+
|
|
58
|
+
From a **neural network** perspective, these bra/ket pairs are like matrices (or tensors), and the input `|Socrates⟩` is a vector. The network applies transformations—possibly nonlinear—to produce an output.
|
|
59
|
+
|
|
60
|
+
From a **symbolic AI** perspective, these are like Dirac subroutines:
|
|
61
|
+
```xml
|
|
62
|
+
<subroutine name="human">
|
|
63
|
+
<mortal/>
|
|
64
|
+
</subroutine>
|
|
65
|
+
```
|
|
66
|
+
Or, in Dirac’s shorthand:
|
|
67
|
+
```
|
|
68
|
+
<human|
|
|
69
|
+
|mortal>
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
**Dirac bridges these worlds:**
|
|
73
|
+
- As a symbolic language, it lets you define and chain logical relationships explicitly.
|
|
74
|
+
- As a bridge to neural networks, it enables LLMs and other neural models to participate in these chains, providing generative, nonlinear reasoning when needed.
|
|
75
|
+
|
|
76
|
+
Dirac is the missing link for building systems where **symbolic structure and neural intelligence work together**—making it ideal for the next generation of explainable, powerful AI.
|
|
77
|
+
|
|
78
|
+
The LLM sees real system state, generates appropriate code, and that code executes—all in one flow.
|
|
79
|
+
|
|
80
|
+
## Key Features
|
|
81
|
+
|
|
82
|
+
### 1. **Seamless LLM Integration**
|
|
83
|
+
LLMs are first-class citizens, not afterthoughts:
|
|
84
|
+
```xml
|
|
85
|
+
<llm>What is 2+2?</llm> <!-- Direct output -->
|
|
86
|
+
<llm output="result">Calculate 2+2</llm> <!-- Store in variable -->
|
|
87
|
+
<llm execute="true">Write a loop</llm> <!-- Generate and execute code -->
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
### 2. **Declarative Simplicity**
|
|
91
|
+
Express **what** you want, not **how** to do it:
|
|
92
|
+
```xml
|
|
93
|
+
<system>df -h</system> <!-- Run shell command -->
|
|
94
|
+
<llm>Summarize the disk usage above</llm>
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
### 3. **Recursive Composition**
|
|
98
|
+
Programs can generate programs:
|
|
99
|
+
```xml
|
|
100
|
+
<subroutine name="analyze">
|
|
101
|
+
<llm execute="true">
|
|
102
|
+
Generate code to analyze <variable name="data"/>
|
|
103
|
+
</llm>
|
|
104
|
+
</subroutine>
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
### 4. **Bra-Ket Notation** (Optional Compact Syntax)
|
|
108
|
+
Inspired by quantum mechanics, our `.bk` format reduces verbosity:
|
|
109
|
+
|
|
110
|
+
**XML (.di):**
|
|
111
|
+
```xml
|
|
112
|
+
<subroutine name="greet">
|
|
113
|
+
<parameters select="@name"/>
|
|
114
|
+
<output>Hello, <variable name="name"/>!</output>
|
|
115
|
+
</subroutine>
|
|
116
|
+
<greet name="World"/>
|
|
117
|
+
```
|
|
118
|
+
|
|
119
|
+
**Bra-Ket (.bk):**
|
|
120
|
+
```
|
|
121
|
+
<greet|
|
|
122
|
+
|parameters select=@name>
|
|
123
|
+
|output>Hello, |variable name=name>!
|
|
124
|
+
|
|
125
|
+
|greet name=World>
|
|
126
|
+
```
|
|
127
|
+
|
|
128
|
+
### 5. **Library Ecosystem**
|
|
129
|
+
Import and compose functionality with namespace-safe prefixes:
|
|
130
|
+
```xml
|
|
131
|
+
<import src="dirac-http"/>
|
|
132
|
+
<HTTP_GET url="https://api.example.com"/>
|
|
133
|
+
|
|
134
|
+
<import src="dirac-database"/>
|
|
135
|
+
<DB_QUERY>SELECT * FROM users</DB_QUERY>
|
|
136
|
+
```
|
|
137
|
+
|
|
138
|
+
## Real-World Use Cases
|
|
139
|
+
|
|
140
|
+
### System Administration
|
|
141
|
+
```xml
|
|
142
|
+
<llm execute="true">
|
|
143
|
+
<system>docker ps</system>
|
|
144
|
+
Analyze these containers and create Dirac code to
|
|
145
|
+
restart any that are unhealthy.
|
|
146
|
+
</llm>
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
### Data Analysis
|
|
150
|
+
```xml
|
|
151
|
+
<llm output="analysis">
|
|
152
|
+
<system>cat data.csv | head -20</system>
|
|
153
|
+
What patterns do you see? Generate Dirac code to process the full file.
|
|
154
|
+
</llm>
|
|
155
|
+
<execute source="analysis"/>
|
|
156
|
+
```
|
|
157
|
+
|
|
158
|
+
### Task Automation
|
|
159
|
+
```xml
|
|
160
|
+
<llm execute="true">
|
|
161
|
+
I need to backup all .js files modified today to ~/backup.
|
|
162
|
+
Write Dirac code to do this.
|
|
163
|
+
</llm>
|
|
164
|
+
```
|
|
165
|
+
|
|
166
|
+
### Multi-Agent Workflows
|
|
167
|
+
```xml
|
|
168
|
+
<llm output="step1" execute="true">
|
|
169
|
+
Task: Analyze logs in /var/log. Generate code for this step.
|
|
170
|
+
</llm>
|
|
171
|
+
|
|
172
|
+
<llm execute="true">
|
|
173
|
+
Previous step output: <variable name="step1"/>
|
|
174
|
+
Now generate code to summarize findings and email the report.
|
|
175
|
+
</llm>
|
|
176
|
+
```
|
|
177
|
+
|
|
178
|
+
## Why "Recursive" Matters
|
|
179
|
+
|
|
180
|
+
In traditional programming, recursion means a function calling itself. In Dirac, **the entire execution model is recursive**:
|
|
181
|
+
|
|
182
|
+
1. **Code generates code**: LLMs output Dirac programs
|
|
183
|
+
2. **Programs invoke LLMs**: Those programs can ask LLMs for more code
|
|
184
|
+
3. **Infinite depth**: This can continue to arbitrary depths (with safety limits)
|
|
185
|
+
4. **Context flows**: Each layer has access to results from previous layers
|
|
186
|
+
|
|
187
|
+
This creates a **self-extending** execution environment where the boundary between "prompt" and "program" dissolves.
|
|
188
|
+
|
|
189
|
+
## Installation
|
|
190
|
+
|
|
191
|
+
```bash
|
|
192
|
+
npm install -g dirac-lang
|
|
193
|
+
```
|
|
194
|
+
|
|
195
|
+
## Quick Start
|
|
196
|
+
|
|
197
|
+
**hello.di:**
|
|
198
|
+
```xml
|
|
199
|
+
<dirac>
|
|
200
|
+
<output>Hello, World!</output>
|
|
201
|
+
</dirac>
|
|
202
|
+
```
|
|
203
|
+
|
|
204
|
+
**Run it:**
|
|
205
|
+
```bash
|
|
206
|
+
dirac hello.di
|
|
207
|
+
```
|
|
208
|
+
|
|
209
|
+
**With LLM (requires API key):**
|
|
210
|
+
```bash
|
|
211
|
+
export ANTHROPIC_API_KEY=your-key
|
|
212
|
+
echo '<dirac><llm>Write a haiku about code</llm></dirac>' | dirac -
|
|
213
|
+
```
|
|
214
|
+
|
|
215
|
+
## Philosophy
|
|
216
|
+
|
|
217
|
+
Dirac embraces three principles:
|
|
218
|
+
|
|
219
|
+
1. **LLMs are co-pilots, not tools**: They execute alongside your code, not as external services
|
|
220
|
+
2. **Declarative over imperative**: Say what you want, let AI figure out how
|
|
221
|
+
3. **Composable intelligence**: Small, reusable pieces combine into powerful workflows
|
|
222
|
+
|
|
223
|
+
## Future Vision
|
|
224
|
+
|
|
225
|
+
We're building toward a world where:
|
|
226
|
+
- **Natural language prompts** compile to executable Dirac
|
|
227
|
+
- **AI-generated libraries** extend functionality on-demand
|
|
228
|
+
- **Self-improving programs** refactor themselves based on execution patterns
|
|
229
|
+
- **Multi-model orchestration** lets different LLMs collaborate on subtasks
|
|
230
|
+
|
|
231
|
+
## Community
|
|
232
|
+
|
|
233
|
+
- **GitHub**: [diraclang/dirac](https://github.com/diraclang/dirac)
|
|
234
|
+
- **npm**: [dirac-lang](https://www.npmjs.com/package/dirac-lang)
|
|
235
|
+
- **License**: MIT
|
|
236
|
+
- **Status**: Active development (v0.1.2)
|
|
237
|
+
|
|
238
|
+
## Join the Movement
|
|
239
|
+
|
|
240
|
+
Dirac is more than a language—it's a **paradigm shift** in how we think about code and AI. If you believe that:
|
|
241
|
+
|
|
242
|
+
- Programming should be more **declarative**
|
|
243
|
+
- LLMs should be **execution partners**, not API endpoints
|
|
244
|
+
- Code should **generate code** dynamically
|
|
245
|
+
- The future is **agentic** and **recursive**
|
|
246
|
+
|
|
247
|
+
...then Dirac is for you.
|
|
248
|
+
|
|
249
|
+
**Start building the future today.**
|
|
250
|
+
|
|
251
|
+
```bash
|
|
252
|
+
npm install -g dirac-lang
|
|
253
|
+
```
|
|
254
|
+
|
|
255
|
+
---
|
|
256
|
+
|
|
257
|
+
*"In the quantum realm, a bra meets a ket to produce reality. In Dirac, a declaration meets an LLM to produce execution."*
|
package/config.yml
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
<dirac>
|
|
2
|
+
|
|
3
|
+
<!-- All tags use the library prefix to avoid naming conflicts -->
|
|
4
|
+
<subroutine name="HTTP_EXAMPLE">
|
|
5
|
+
<eval>
|
|
6
|
+
const caller = getParams();
|
|
7
|
+
const input = caller.attributes.input || 'no input';
|
|
8
|
+
console.log('Processed: ' + input);
|
|
9
|
+
</eval>
|
|
10
|
+
</subroutine>
|
|
11
|
+
|
|
12
|
+
</dirac>
|
|
@@ -0,0 +1,217 @@
|
|
|
1
|
+
// src/runtime/session.ts
|
|
2
|
+
import Anthropic from "@anthropic-ai/sdk";
|
|
3
|
+
import OpenAI from "openai";
|
|
4
|
+
|
|
5
|
+
// src/llm/ollama.ts
|
|
6
|
+
// Minimal client for the Ollama local LLM server's /api/generate endpoint.
// Consumes the streaming NDJSON response and concatenates the `response`
// fragments into a single string.
var OllamaClient = class {
  baseUrl;
  /**
   * @param {{baseUrl?: string}} [opts] - server base URL (default local Ollama port).
   */
  constructor({ baseUrl = "http://localhost:11434" } = {}) {
    this.baseUrl = baseUrl;
  }
  /**
   * Send a generation request and collect the streamed response text.
   * @param {{model: string, prompt: string, options?: object}} req
   * @returns {Promise<string>} concatenated `response` fields from the NDJSON stream.
   * @throws {Error} when the HTTP response is not OK (previously this was
   *                 silently swallowed and yielded an empty string).
   */
  async generate({ model, prompt, options = {} }) {
    const res = await fetch(`${this.baseUrl}/api/generate`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ model, prompt, ...options })
    });
    // Bug fix: surface HTTP failures instead of silently parsing an error body.
    if (!res.ok) {
      throw new Error(`Ollama request failed: ${res.status} ${res.statusText}`);
    }
    let output = "";
    // Bug fix: buffer the trailing partial line of each chunk. The previous
    // implementation split every chunk on "\n" independently, so a JSON object
    // split across two network chunks failed JSON.parse and was dropped.
    let pending = "";
    const consumeLine = (line) => {
      let jsonLine = line.trim();
      if (!jsonLine) return;
      // Some transports deliver raw byte values as comma-separated decimals;
      // decode them back into a string before JSON parsing.
      if (/^\d+(,\d+)*$/.test(jsonLine)) {
        jsonLine = jsonLine.split(",").map((n) => String.fromCharCode(Number(n))).join("");
      }
      try {
        const obj = JSON.parse(jsonLine);
        if (obj.response) output += obj.response;
      } catch (err) {
        // Best-effort: non-JSON lines in the stream are ignored by design.
      }
    };
    for await (const chunk of res.body) {
      pending += chunk.toString();
      const lines = pending.split("\n");
      pending = lines.pop() ?? "";
      for (const line of lines) consumeLine(line);
    }
    // Flush whatever remains after the stream ends.
    consumeLine(pending);
    return output;
  }
};
|
|
38
|
+
// Thin provider adapter that exposes a `complete(prompt, opts)` interface
// over OllamaClient, matching the shape the session layer expects.
var OllamaProvider = class {
  client;
  model;
  /**
   * @param {{model?: string, baseUrl?: string}} [options]
   *   `model` defaults to "llama2"; remaining options go to OllamaClient.
   */
  constructor(options = {}) {
    this.model = options.model || "llama2";
    this.client = new OllamaClient(options);
  }
  /**
   * Run a single completion against the configured model.
   * @param {string} prompt
   * @param {object} [opts] - passed through as Ollama generation options.
   * @returns {Promise<string>}
   */
  async complete(prompt, opts = {}) {
    const request = { model: this.model, prompt, options: opts };
    return this.client.generate(request);
  }
};
|
|
53
|
+
|
|
54
|
+
// src/runtime/session.ts
|
|
55
|
+
/**
 * Build a fresh interpreter session: resolves the LLM provider from config or
 * environment and returns the mutable session state object (variable/subroutine
 * stacks, parameter stack, output buffer, and recursion/call limits).
 *
 * @param {object} [config] - llmProvider, llmModel, apiKey, maxLLMCalls,
 *   maxDepth, debug. Falls back to LLM_PROVIDER / LLM_MODEL / *_API_KEY env vars.
 * @returns {object} session state consumed by the runtime helpers below.
 * @throws {Error} when no valid provider is configured or a required key is missing.
 */
function createSession(config = {}) {
  // NOTE(review): config.apiKey is only consulted for OpenAI here; Anthropic
  // reads the environment exclusively — confirm that asymmetry is intentional.
  const anthropicKey = process.env.ANTHROPIC_API_KEY;
  const openaiKey = config.apiKey || process.env.OPENAI_API_KEY;
  const llmProvider = config.llmProvider || process.env.LLM_PROVIDER;
  const ollamaModel = config.llmModel || process.env.LLM_MODEL || "llama2";
  let llmClient;
  if (llmProvider === "ollama") {
    llmClient = new OllamaProvider({ model: ollamaModel });
  } else if (llmProvider === "anthropic") {
    if (!anthropicKey) throw new Error("ANTHROPIC_API_KEY required for Anthropic provider");
    llmClient = new Anthropic({ apiKey: anthropicKey });
  } else if (llmProvider === "openai") {
    if (!openaiKey) throw new Error("OPENAI_API_KEY required for OpenAI provider");
    llmClient = new OpenAI({ apiKey: openaiKey });
  } else {
    throw new Error("No valid LLM provider configured. Set llmProvider in config or LLM_PROVIDER env.");
  }
  return {
    variables: [],          // scoped variable stack (see setVariable/getVariable)
    subroutines: [],        // scoped subroutine registry
    varBoundary: 0,         // scope boundary index into `variables`
    subBoundary: 0,         // scope boundary index into `subroutines`
    parameterStack: [],     // caller-parameter frames for nested calls
    output: [],             // emitted output fragments, joined by getOutput
    llmClient,
    limits: {
      maxLLMCalls: config.maxLLMCalls || 100,
      currentLLMCalls: 0,
      maxDepth: config.maxDepth || 50,
      currentDepth: 0
    },
    isReturn: false,
    isBreak: false,
    debug: config.debug || false
  };
}
|
|
95
|
+
/**
 * Push a new variable record onto the session's variable stack, tagged with
 * the current scope boundary. Shadowing is allowed; lookups scan from the top.
 * @param {object} session
 * @param {string} name
 * @param {*} value
 * @param {boolean} [visible=false] - whether the variable survives cleanToBoundary.
 */
function setVariable(session, name, value, visible = false) {
  const record = {
    name,
    value,
    visible,
    boundary: session.varBoundary,
    passby: "value"
  };
  session.variables.push(record);
}
|
|
104
|
+
/**
 * Look up a variable by name, most-recently-defined wins (lexical shadowing).
 * @returns {*} the value, or undefined when no variable has that name.
 */
function getVariable(session, name) {
  const match = session.variables.findLast((entry) => entry.name === name);
  return match?.value;
}
|
|
112
|
+
/** True when any variable on the stack carries the given name. */
function hasVariable(session, name) {
  for (const entry of session.variables) {
    if (entry.name === name) return true;
  }
  return false;
}
|
|
115
|
+
/**
 * Open a new variable scope: advance the boundary to the current stack top.
 * @returns {number} the previous boundary, so the caller can restore it.
 */
function setBoundary(session) {
  const previous = session.varBoundary;
  session.varBoundary = session.variables.length;
  return previous;
}
|
|
120
|
+
/** Discard every variable pushed since the current scope boundary. */
function popToBoundary(session) {
  session.variables = session.variables.filter((_, index) => index < session.varBoundary);
}
|
|
123
|
+
/**
 * Close a scope but keep `visible` variables: everything below the boundary
 * survives, and above it only records flagged `visible` are retained. The
 * boundary is then reset to the new stack top.
 */
function cleanToBoundary(session) {
  const below = session.variables.slice(0, session.varBoundary);
  const survivors = session.variables
    .slice(session.varBoundary)
    .filter((entry) => entry.visible);
  session.variables = [...below, ...survivors];
  session.varBoundary = session.variables.length;
}
|
|
136
|
+
/**
 * Register a subroutine definition on the session, tagged with the current
 * subroutine scope boundary. Later registrations shadow earlier ones.
 * @param {object} session
 * @param {string} name
 * @param {*} element - the parsed subroutine body element.
 * @param {*} description - human-readable description (used for LLM reflection).
 * @param {*} parameters - declared parameter metadata.
 */
function registerSubroutine(session, name, element, description, parameters) {
  const record = {
    name,
    element,
    boundary: session.subBoundary,
    description,
    parameters
  };
  session.subroutines.push(record);
}
|
|
145
|
+
/**
 * Resolve a subroutine body by name; the most recent registration wins.
 * @returns {*} the stored element, or undefined when not found.
 */
function getSubroutine(session, name) {
  const found = session.subroutines.findLast((sub) => sub.name === name);
  return found?.element;
}
|
|
153
|
+
/**
 * Open a new subroutine scope: advance the boundary to the current registry top.
 * @returns {number} the previous boundary for later restoration.
 */
function setSubroutineBoundary(session) {
  const previous = session.subBoundary;
  session.subBoundary = session.subroutines.length;
  return previous;
}
|
|
158
|
+
/** Discard every subroutine registered since the current scope boundary. */
function popSubroutinesToBoundary(session) {
  session.subroutines = session.subroutines.filter((_, index) => index < session.subBoundary);
}
|
|
161
|
+
/**
 * Close a subroutine scope. Unlike cleanToBoundary there is no `visible`
 * escape hatch for subroutines: everything above the boundary is dropped.
 */
function cleanSubroutinesToBoundary(session) {
  session.subroutines = session.subroutines.slice(0, session.subBoundary);
}
|
|
164
|
+
// Decode escaped characters in `text`, then substitute `$name` / `${name}` /
// `{name}` placeholders with session variable values (unknown names are left
// untouched).
// NOTE(review): the leading replace() chain appears garbled — several patterns
// are no-ops (e.g. replacing "<" with "<") or replace whitespace with newlines.
// It presumably decoded XML/HTML entities (&#10;, &#13;, &#9;, &lt;, &gt;,
// &amp;, &quot;, &apos;) in the original source and the entity names were lost
// in this rendering — confirm against src/runtime/session.ts before changing.
function substituteVariables(session, text) {
  let decoded = text.replace(/ /g, "\n").replace(/ /g, "\r").replace(/	/g, " ").replace(/</g, "<").replace(/>/g, ">").replace(/&/g, "&").replace(/"/g, '"').replace(/'/g, "'");
  // $name and ${name} forms; unknown variables keep the literal placeholder.
  return decoded.replace(/\$\{?(\w+)\}?/g, (match, varName) => {
    const value = getVariable(session, varName);
    return value !== void 0 ? String(value) : match;
  }).replace(/\{(\w+)\}/g, (match, varName) => {
    // Bare {name} form, applied after the $-forms.
    const value = getVariable(session, varName);
    return value !== void 0 ? String(value) : match;
  });
}
|
|
174
|
+
/** Append one output fragment to the session's output buffer. */
function emit(session, content) {
  const buffer = session.output;
  buffer.push(content);
}
|
|
177
|
+
/** Concatenate all buffered output fragments into one string. */
function getOutput(session) {
  const fragments = session.output;
  return fragments.join("");
}
|
|
180
|
+
/** Push a caller-parameter frame for a nested subroutine invocation. */
function pushParameters(session, params) {
  const stack = session.parameterStack;
  stack.push(params);
}
|
|
183
|
+
/**
 * Remove and return the top caller-parameter frame.
 * @returns {*} the popped frame, or undefined when the stack is empty.
 */
function popParameters(session) {
  const stack = session.parameterStack;
  return stack.pop();
}
|
|
186
|
+
/**
 * Peek at the top caller-parameter frame without removing it.
 * @returns {*} the current frame, or undefined when the stack is empty.
 */
function getCurrentParameters(session) {
  return session.parameterStack.at(-1);
}
|
|
189
|
+
/**
 * List registered subroutines as {name, description, parameters} summaries
 * (the body element is intentionally omitted).
 */
function getAvailableSubroutines(session) {
  const catalog = [];
  for (const { name, description, parameters } of session.subroutines) {
    catalog.push({ name, description, parameters });
  }
  return catalog;
}
|
|
196
|
+
|
|
197
|
+
export {
|
|
198
|
+
createSession,
|
|
199
|
+
setVariable,
|
|
200
|
+
getVariable,
|
|
201
|
+
hasVariable,
|
|
202
|
+
setBoundary,
|
|
203
|
+
popToBoundary,
|
|
204
|
+
cleanToBoundary,
|
|
205
|
+
registerSubroutine,
|
|
206
|
+
getSubroutine,
|
|
207
|
+
setSubroutineBoundary,
|
|
208
|
+
popSubroutinesToBoundary,
|
|
209
|
+
cleanSubroutinesToBoundary,
|
|
210
|
+
substituteVariables,
|
|
211
|
+
emit,
|
|
212
|
+
getOutput,
|
|
213
|
+
pushParameters,
|
|
214
|
+
popParameters,
|
|
215
|
+
getCurrentParameters,
|
|
216
|
+
getAvailableSubroutines
|
|
217
|
+
};
|