arc-lang 0.5.7 → 0.5.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/interpreter.js +335 -18
- package/dist/lexer.js +3 -2
- package/dist/modules.js +8 -1
- package/dist/parser.js +1 -1
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +1 -1
package/dist/interpreter.js
CHANGED
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
// Arc Language Tree-Walking Interpreter
|
|
2
2
|
import * as nodeCrypto from "crypto";
|
|
3
3
|
import * as nodeOs from "os";
|
|
4
|
+
import * as nodeFs from "fs";
|
|
5
|
+
import { execSync } from "child_process";
|
|
4
6
|
class Env {
|
|
5
7
|
parent;
|
|
6
8
|
vars = new Map();
|
|
@@ -71,6 +73,75 @@ function toStr(v) {
|
|
|
71
73
|
return `<async>`;
|
|
72
74
|
return String(v);
|
|
73
75
|
}
|
|
76
|
+
// Perform a blocking HTTP request by shelling out to a child Node process
// (the tree-walking interpreter is synchronous, so it cannot await fetch()
// in-process).
//
// `body` may be an Arc map; if it carries a "data" entry that string becomes
// the request body, and an optional "headers" Arc map supplies custom HTTP
// headers. Any other non-null body is stringified with toStr().
// Returns an Arc map with keys: ok, status, data, method, url. On spawn
// failure/timeout the same shape is returned with ok=false, status=0.
function syncFetch(method, url, body) {
    // Extract the body string and optional custom headers from the Arc value.
    let bodyStr = null;
    let customHeaders = {};
    if (body != null) {
        if (typeof body === "object" && "__map" in body) {
            const m = body.entries;
            const d = m.get("data");
            const h = m.get("headers");
            if (h != null && typeof h === "object" && "__map" in h) {
                const hm = h.entries;
                for (const [k, v] of hm)
                    customHeaders[k] = toStr(v);
            }
            bodyStr = d != null ? toStr(d) : toStr(body);
        }
        else {
            bodyStr = toStr(body);
        }
    }
    // Pass config via an env var to avoid shell escaping of the payload.
    const fetchConfig = JSON.stringify({ method, url, body: bodyStr, headers: customHeaders });
    const script = `const c=JSON.parse(process.env.ARC_FETCH);(async()=>{const o={method:c.method,headers:{...c.headers}};if(c.body!==null){o.body=c.body;if(!o.headers["Content-Type"])o.headers["Content-Type"]="application/json";}try{const r=await fetch(c.url,o);const t=await r.text();let d;try{d=JSON.parse(t)}catch{d=t}console.log(JSON.stringify({ok:true,status:r.status,data:d}))}catch(e){console.log(JSON.stringify({ok:false,status:0,data:e.message}))}})()`;
    try {
        // Feed the helper script to node over stdin instead of interpolating
        // it into the command line: `node -e "<escaped>"` only escaped double
        // quotes, which is brittle across shells (backslashes, Windows cmd
        // quoting). With `input`, nothing from the script reaches a shell.
        const raw = execSync("node", {
            input: script,
            timeout: 30000,
            encoding: "utf-8",
            stdio: ["pipe", "pipe", "pipe"],
            env: { ...process.env, ARC_FETCH: fetchConfig },
        }).trim();
        const parsed = JSON.parse(raw);
        const entries = new Map();
        entries.set("ok", parsed.ok);
        entries.set("status", parsed.status);
        // Convert nested JSON objects/arrays into Arc maps/lists.
        entries.set("data", jsToArc(parsed.data));
        entries.set("method", method);
        entries.set("url", url);
        return { __map: true, entries };
    }
    catch (e) {
        // Spawn failure, timeout, or unparsable child output: report an error
        // map with the same shape as a successful response.
        const entries = new Map();
        entries.set("ok", false);
        entries.set("status", 0);
        entries.set("data", e.message || "fetch error");
        entries.set("method", method);
        entries.set("url", url);
        return { __map: true, entries };
    }
}
|
|
129
|
+
// Recursively convert a plain JavaScript value (e.g. parsed JSON) into the
// interpreter's Arc representation: null/undefined collapse to null,
// primitives pass through, arrays convert element-wise, and objects become
// Arc maps ({ __map: true, entries: Map }).
function jsToArc(v) {
    if (v == null)
        return null;
    const t = typeof v;
    if (t === "number" || t === "string" || t === "boolean")
        return v;
    if (Array.isArray(v))
        return v.map((item) => jsToArc(item));
    if (t === "object") {
        const entries = new Map(Object.entries(v).map(([key, val]) => [key, jsToArc(val)]));
        return { __map: true, entries };
    }
    // Anything else (function, symbol, bigint, ...) is stringified.
    return String(v);
}
|
|
74
145
|
function resolveAsync(v) {
|
|
75
146
|
if (v && typeof v === "object" && "__async" in v) {
|
|
76
147
|
return v.thunk();
|
|
@@ -644,7 +715,7 @@ function makePrelude(env) {
|
|
|
644
715
|
case "os.list_dir": {
|
|
645
716
|
try {
|
|
646
717
|
const fs = require("fs");
|
|
647
|
-
return
|
|
718
|
+
return nodeFs.readdirSync(args[0]);
|
|
648
719
|
}
|
|
649
720
|
catch {
|
|
650
721
|
return [];
|
|
@@ -653,7 +724,7 @@ function makePrelude(env) {
|
|
|
653
724
|
case "os.is_file": {
|
|
654
725
|
try {
|
|
655
726
|
const fs = require("fs");
|
|
656
|
-
return
|
|
727
|
+
return nodeFs.statSync(args[0]).isFile();
|
|
657
728
|
}
|
|
658
729
|
catch {
|
|
659
730
|
return false;
|
|
@@ -662,7 +733,7 @@ function makePrelude(env) {
|
|
|
662
733
|
case "os.is_dir": {
|
|
663
734
|
try {
|
|
664
735
|
const fs = require("fs");
|
|
665
|
-
return
|
|
736
|
+
return nodeFs.statSync(args[0]).isDirectory();
|
|
666
737
|
}
|
|
667
738
|
catch {
|
|
668
739
|
return false;
|
|
@@ -671,7 +742,7 @@ function makePrelude(env) {
|
|
|
671
742
|
case "os.mkdir": {
|
|
672
743
|
try {
|
|
673
744
|
const fs = require("fs");
|
|
674
|
-
|
|
745
|
+
nodeFs.mkdirSync(args[0], { recursive: true });
|
|
675
746
|
return true;
|
|
676
747
|
}
|
|
677
748
|
catch {
|
|
@@ -681,7 +752,7 @@ function makePrelude(env) {
|
|
|
681
752
|
case "os.rmdir": {
|
|
682
753
|
try {
|
|
683
754
|
const fs = require("fs");
|
|
684
|
-
|
|
755
|
+
nodeFs.rmdirSync(args[0]);
|
|
685
756
|
return true;
|
|
686
757
|
}
|
|
687
758
|
catch {
|
|
@@ -691,7 +762,7 @@ function makePrelude(env) {
|
|
|
691
762
|
case "os.remove": {
|
|
692
763
|
try {
|
|
693
764
|
const fs = require("fs");
|
|
694
|
-
|
|
765
|
+
nodeFs.unlinkSync(args[0]);
|
|
695
766
|
return true;
|
|
696
767
|
}
|
|
697
768
|
catch {
|
|
@@ -701,7 +772,7 @@ function makePrelude(env) {
|
|
|
701
772
|
case "os.rename": {
|
|
702
773
|
try {
|
|
703
774
|
const fs = require("fs");
|
|
704
|
-
|
|
775
|
+
nodeFs.renameSync(args[0], args[1]);
|
|
705
776
|
return true;
|
|
706
777
|
}
|
|
707
778
|
catch {
|
|
@@ -711,7 +782,7 @@ function makePrelude(env) {
|
|
|
711
782
|
case "os.copy": {
|
|
712
783
|
try {
|
|
713
784
|
const fs = require("fs");
|
|
714
|
-
|
|
785
|
+
nodeFs.copyFileSync(args[0], args[1]);
|
|
715
786
|
return true;
|
|
716
787
|
}
|
|
717
788
|
catch {
|
|
@@ -721,7 +792,7 @@ function makePrelude(env) {
|
|
|
721
792
|
case "os.file_size": {
|
|
722
793
|
try {
|
|
723
794
|
const fs = require("fs");
|
|
724
|
-
return
|
|
795
|
+
return nodeFs.statSync(args[0]).size;
|
|
725
796
|
}
|
|
726
797
|
catch {
|
|
727
798
|
return null;
|
|
@@ -736,7 +807,7 @@ function makePrelude(env) {
|
|
|
736
807
|
throw new Error(`Potentially unsafe command (injection risk): ${cmd}`);
|
|
737
808
|
}
|
|
738
809
|
const cp = require("child_process");
|
|
739
|
-
return
|
|
810
|
+
return execSync(cmd, { encoding: "utf-8", timeout: 10000 }).trim();
|
|
740
811
|
}
|
|
741
812
|
catch (e) {
|
|
742
813
|
if (e.message?.includes("injection risk"))
|
|
@@ -744,16 +815,263 @@ function makePrelude(env) {
|
|
|
744
815
|
return null;
|
|
745
816
|
}
|
|
746
817
|
}
|
|
818
|
+
// --- prompt natives ---
|
|
819
|
+
case "prompt.token_count": {
|
|
820
|
+
const text = String(args[0] ?? "");
|
|
821
|
+
return Math.ceil(text.length / 4);
|
|
822
|
+
}
|
|
823
|
+
case "prompt.token_truncate": {
|
|
824
|
+
const text = String(args[0] ?? "");
|
|
825
|
+
const maxTokens = args[1];
|
|
826
|
+
const maxChars = maxTokens * 4;
|
|
827
|
+
if (text.length <= maxChars)
|
|
828
|
+
return text;
|
|
829
|
+
return text.slice(0, maxChars);
|
|
830
|
+
}
|
|
831
|
+
case "prompt.chunk": {
|
|
832
|
+
const text = String(args[0] ?? "");
|
|
833
|
+
const maxTokens = args[1];
|
|
834
|
+
const chunkSize = maxTokens * 4;
|
|
835
|
+
const chunks = [];
|
|
836
|
+
for (let i = 0; i < text.length; i += chunkSize) {
|
|
837
|
+
chunks.push(text.slice(i, i + chunkSize));
|
|
838
|
+
}
|
|
839
|
+
return chunks.length > 0 ? chunks : [""];
|
|
840
|
+
}
|
|
841
|
+
case "prompt.context_window": {
|
|
842
|
+
const messages = args[0];
|
|
843
|
+
const maxTokens = args[1];
|
|
844
|
+
if (!Array.isArray(messages))
|
|
845
|
+
return [];
|
|
846
|
+
let budget = maxTokens;
|
|
847
|
+
const result = [];
|
|
848
|
+
for (let i = messages.length - 1; i >= 0; i--) {
|
|
849
|
+
const msg = messages[i];
|
|
850
|
+
const content = msg?.entries?.get("content") ?? "";
|
|
851
|
+
const tokens = Math.ceil(String(content).length / 4);
|
|
852
|
+
if (tokens > budget)
|
|
853
|
+
break;
|
|
854
|
+
budget -= tokens;
|
|
855
|
+
result.unshift(msg);
|
|
856
|
+
}
|
|
857
|
+
return result;
|
|
858
|
+
}
|
|
859
|
+
case "prompt.template": {
|
|
860
|
+
let text = String(args[0] ?? "");
|
|
861
|
+
const vars = args[1];
|
|
862
|
+
if (vars && typeof vars === "object" && "__map" in vars) {
|
|
863
|
+
for (const [k, v] of vars.entries) {
|
|
864
|
+
text = text.replaceAll(`<<${k}>>`, String(v ?? ""));
|
|
865
|
+
}
|
|
866
|
+
}
|
|
867
|
+
return text;
|
|
868
|
+
}
|
|
869
|
+
// --- store natives ---
|
|
870
|
+
case "store.open": {
|
|
871
|
+
const p = args[0];
|
|
872
|
+
let data = {};
|
|
873
|
+
try {
|
|
874
|
+
const raw = nodeFs.readFileSync(p, "utf-8");
|
|
875
|
+
data = JSON.parse(raw);
|
|
876
|
+
}
|
|
877
|
+
catch { /* file doesn't exist or invalid JSON — start empty */ }
|
|
878
|
+
return { __store: true, path: p, data };
|
|
879
|
+
}
|
|
880
|
+
case "store.get": {
|
|
881
|
+
const s = args[0];
|
|
882
|
+
const k = args[1];
|
|
883
|
+
return s.data[k] ?? null;
|
|
884
|
+
}
|
|
885
|
+
case "store.set": {
|
|
886
|
+
const s = args[0];
|
|
887
|
+
s.data[args[1]] = args[2];
|
|
888
|
+
nodeFs.writeFileSync(s.path, JSON.stringify(s.data, null, 2), "utf-8");
|
|
889
|
+
return args[2];
|
|
890
|
+
}
|
|
891
|
+
case "store.delete": {
|
|
892
|
+
const s = args[0];
|
|
893
|
+
const k = args[1];
|
|
894
|
+
const had = k in s.data;
|
|
895
|
+
delete s.data[k];
|
|
896
|
+
nodeFs.writeFileSync(s.path, JSON.stringify(s.data, null, 2), "utf-8");
|
|
897
|
+
return had;
|
|
898
|
+
}
|
|
899
|
+
case "store.has": {
|
|
900
|
+
const s = args[0];
|
|
901
|
+
return args[1] in s.data;
|
|
902
|
+
}
|
|
903
|
+
case "store.keys": {
|
|
904
|
+
const s = args[0];
|
|
905
|
+
return Object.keys(s.data);
|
|
906
|
+
}
|
|
907
|
+
case "store.values": {
|
|
908
|
+
const s = args[0];
|
|
909
|
+
return Object.values(s.data);
|
|
910
|
+
}
|
|
911
|
+
case "store.entries": {
|
|
912
|
+
const s = args[0];
|
|
913
|
+
return Object.entries(s.data).map(([k, v]) => ({ key: k, value: v }));
|
|
914
|
+
}
|
|
915
|
+
case "store.clear": {
|
|
916
|
+
const s = args[0];
|
|
917
|
+
s.data = {};
|
|
918
|
+
nodeFs.writeFileSync(s.path, JSON.stringify(s.data, null, 2), "utf-8");
|
|
919
|
+
return true;
|
|
920
|
+
}
|
|
921
|
+
case "store.size": {
|
|
922
|
+
const s = args[0];
|
|
923
|
+
return Object.keys(s.data).length;
|
|
924
|
+
}
|
|
925
|
+
case "store.merge": {
|
|
926
|
+
const s = args[0];
|
|
927
|
+
const m = args[1];
|
|
928
|
+
if (m && typeof m === "object" && "__map" in m && m.entries instanceof Map) {
|
|
929
|
+
for (const [k, v] of m.entries) {
|
|
930
|
+
s.data[k] = v;
|
|
931
|
+
}
|
|
932
|
+
}
|
|
933
|
+
else if (m && typeof m === "object" && !Array.isArray(m)) {
|
|
934
|
+
for (const [k, v] of Object.entries(m)) {
|
|
935
|
+
if (k !== "__type" && k !== "__proto__" && k !== "__map" && k !== "entries")
|
|
936
|
+
s.data[k] = v;
|
|
937
|
+
}
|
|
938
|
+
}
|
|
939
|
+
nodeFs.writeFileSync(s.path, JSON.stringify(s.data, null, 2), "utf-8");
|
|
940
|
+
return true;
|
|
941
|
+
}
|
|
747
942
|
default: return null;
|
|
748
943
|
}
|
|
749
944
|
},
|
|
945
|
+
// --- embed/vector natives ---
|
|
946
|
+
embed_dot_product: (a, b) => {
|
|
947
|
+
if (!Array.isArray(a) || !Array.isArray(b))
|
|
948
|
+
return 0;
|
|
949
|
+
let sum = 0;
|
|
950
|
+
for (let i = 0; i < a.length; i++)
|
|
951
|
+
sum += a[i] * b[i];
|
|
952
|
+
return sum;
|
|
953
|
+
},
|
|
954
|
+
embed_magnitude: (v) => {
|
|
955
|
+
if (!Array.isArray(v))
|
|
956
|
+
return 0;
|
|
957
|
+
let sum = 0;
|
|
958
|
+
for (let i = 0; i < v.length; i++)
|
|
959
|
+
sum += v[i] * v[i];
|
|
960
|
+
return Math.sqrt(sum);
|
|
961
|
+
},
|
|
962
|
+
embed_cosine_similarity: (a, b) => {
|
|
963
|
+
if (!Array.isArray(a) || !Array.isArray(b))
|
|
964
|
+
return 0;
|
|
965
|
+
let dot = 0, magA = 0, magB = 0;
|
|
966
|
+
for (let i = 0; i < a.length; i++) {
|
|
967
|
+
const ai = a[i], bi = b[i];
|
|
968
|
+
dot += ai * bi;
|
|
969
|
+
magA += ai * ai;
|
|
970
|
+
magB += bi * bi;
|
|
971
|
+
}
|
|
972
|
+
const denom = Math.sqrt(magA) * Math.sqrt(magB);
|
|
973
|
+
return denom === 0 ? 0 : dot / denom;
|
|
974
|
+
},
|
|
975
|
+
embed_normalize: (v) => {
|
|
976
|
+
if (!Array.isArray(v))
|
|
977
|
+
return [];
|
|
978
|
+
let sum = 0;
|
|
979
|
+
for (let i = 0; i < v.length; i++)
|
|
980
|
+
sum += v[i] * v[i];
|
|
981
|
+
const mag = Math.sqrt(sum);
|
|
982
|
+
if (mag === 0)
|
|
983
|
+
return v;
|
|
984
|
+
return v.map(x => x / mag);
|
|
985
|
+
},
|
|
986
|
+
embed_euclidean_distance: (a, b) => {
|
|
987
|
+
if (!Array.isArray(a) || !Array.isArray(b))
|
|
988
|
+
return 0;
|
|
989
|
+
let sum = 0;
|
|
990
|
+
for (let i = 0; i < a.length; i++) {
|
|
991
|
+
const d = a[i] - b[i];
|
|
992
|
+
sum += d * d;
|
|
993
|
+
}
|
|
994
|
+
return Math.sqrt(sum);
|
|
995
|
+
},
|
|
996
|
+
embed_centroid: (vectors) => {
|
|
997
|
+
if (!Array.isArray(vectors) || vectors.length === 0)
|
|
998
|
+
return [];
|
|
999
|
+
const first = vectors[0];
|
|
1000
|
+
const dim = first.length;
|
|
1001
|
+
const sums = new Float64Array(dim);
|
|
1002
|
+
for (let i = 0; i < vectors.length; i++) {
|
|
1003
|
+
const v = vectors[i];
|
|
1004
|
+
for (let j = 0; j < dim; j++)
|
|
1005
|
+
sums[j] += v[j];
|
|
1006
|
+
}
|
|
1007
|
+
const n = vectors.length;
|
|
1008
|
+
const result = new Array(dim);
|
|
1009
|
+
for (let j = 0; j < dim; j++)
|
|
1010
|
+
result[j] = sums[j] / n;
|
|
1011
|
+
return result;
|
|
1012
|
+
},
|
|
1013
|
+
embed_most_similar: (query, candidates, topK) => {
|
|
1014
|
+
if (!Array.isArray(query) || !Array.isArray(candidates))
|
|
1015
|
+
return [];
|
|
1016
|
+
const k = topK;
|
|
1017
|
+
const scored = [];
|
|
1018
|
+
for (let i = 0; i < candidates.length; i++) {
|
|
1019
|
+
const c = candidates[i];
|
|
1020
|
+
const vec = c.entries.get("vector");
|
|
1021
|
+
let dot = 0, magA = 0, magB = 0;
|
|
1022
|
+
for (let j = 0; j < query.length; j++) {
|
|
1023
|
+
const qj = query[j], vj = vec[j];
|
|
1024
|
+
dot += qj * vj;
|
|
1025
|
+
magA += qj * qj;
|
|
1026
|
+
magB += vj * vj;
|
|
1027
|
+
}
|
|
1028
|
+
const denom = Math.sqrt(magA) * Math.sqrt(magB);
|
|
1029
|
+
scored.push({ score: denom === 0 ? 0 : dot / denom, idx: i });
|
|
1030
|
+
}
|
|
1031
|
+
scored.sort((a, b) => b.score - a.score);
|
|
1032
|
+
const results = [];
|
|
1033
|
+
for (let i = 0; i < Math.min(k, scored.length); i++) {
|
|
1034
|
+
const s = scored[i];
|
|
1035
|
+
const c = candidates[s.idx];
|
|
1036
|
+
const m = new Map();
|
|
1037
|
+
m.set("id", c.entries.get("id") ?? null);
|
|
1038
|
+
m.set("score", s.score);
|
|
1039
|
+
results.push({ __map: true, entries: m });
|
|
1040
|
+
}
|
|
1041
|
+
return results;
|
|
1042
|
+
},
|
|
1043
|
+
// --- file I/O (used by stdlib/io.arc) ---
|
|
1044
|
+
read: (path) => {
|
|
1045
|
+
try {
|
|
1046
|
+
return nodeFs.readFileSync(path, "utf-8");
|
|
1047
|
+
}
|
|
1048
|
+
catch {
|
|
1049
|
+
return null;
|
|
1050
|
+
}
|
|
1051
|
+
},
|
|
1052
|
+
write: (path, content) => {
|
|
1053
|
+
try {
|
|
1054
|
+
nodeFs.writeFileSync(path, content, "utf-8");
|
|
1055
|
+
return true;
|
|
1056
|
+
}
|
|
1057
|
+
catch {
|
|
1058
|
+
return false;
|
|
1059
|
+
}
|
|
1060
|
+
},
|
|
750
1061
|
};
|
|
751
1062
|
function callFn(fn, args) {
|
|
752
1063
|
if (fn && typeof fn === "object" && "__fn" in fn) {
|
|
753
1064
|
const f = fn;
|
|
754
1065
|
const fnEnv = new Env(f.closure);
|
|
755
1066
|
bindParams(f, args, fnEnv, evalExpr);
|
|
756
|
-
|
|
1067
|
+
try {
|
|
1068
|
+
return evalExpr(f.body, fnEnv);
|
|
1069
|
+
}
|
|
1070
|
+
catch (e) {
|
|
1071
|
+
if (e instanceof ReturnSignal)
|
|
1072
|
+
return e.value;
|
|
1073
|
+
throw e;
|
|
1074
|
+
}
|
|
757
1075
|
}
|
|
758
1076
|
// It might be a native function stored as a special wrapper
|
|
759
1077
|
if (typeof fn === "function")
|
|
@@ -1127,18 +1445,17 @@ function evalExpr(expr, env) {
|
|
|
1127
1445
|
const method = expr.method.toUpperCase();
|
|
1128
1446
|
const arg = evalExpr(expr.arg, env);
|
|
1129
1447
|
const url = toStr(arg);
|
|
1130
|
-
//
|
|
1448
|
+
// Real HTTP tool calls via synchronous fetch
|
|
1131
1449
|
if (["GET", "POST", "PUT", "DELETE", "PATCH"].includes(method)) {
|
|
1132
|
-
|
|
1450
|
+
let bodyArg = null;
|
|
1133
1451
|
if (expr.body) {
|
|
1134
|
-
|
|
1135
|
-
return { __map: true, entries: new Map([["status", 200], ["method", method], ["url", url], ["body", body]]) };
|
|
1452
|
+
bodyArg = evalExpr(expr.body, env);
|
|
1136
1453
|
}
|
|
1137
|
-
return
|
|
1454
|
+
return syncFetch(method, url, bodyArg);
|
|
1138
1455
|
}
|
|
1139
1456
|
// Custom tool call
|
|
1140
|
-
console.log(`[
|
|
1141
|
-
return `
|
|
1457
|
+
console.log(`[tool @${expr.method}(${url})]`);
|
|
1458
|
+
return `result-from-${expr.method}`;
|
|
1142
1459
|
}
|
|
1143
1460
|
case "AsyncExpr": {
|
|
1144
1461
|
const capturedEnv = env;
|
package/dist/lexer.js
CHANGED
|
@@ -138,8 +138,9 @@ export function lex(source) {
|
|
|
138
138
|
let hasInterp = false;
|
|
139
139
|
while (i < source.length && peek() !== '"') {
|
|
140
140
|
if (peek() === "\n") {
|
|
141
|
-
//
|
|
142
|
-
|
|
141
|
+
// Allow multiline strings
|
|
142
|
+
str += advance();
|
|
143
|
+
continue;
|
|
143
144
|
}
|
|
144
145
|
if (peek() === "{") {
|
|
145
146
|
hasInterp = true;
|
package/dist/modules.js
CHANGED
|
@@ -109,11 +109,18 @@ export function handleUse(stmt, env, currentFile) {
|
|
|
109
109
|
}
|
|
110
110
|
}
|
|
111
111
|
else {
|
|
112
|
-
// No selective imports: bind all exports
|
|
112
|
+
// No selective imports: bind all exports flat
|
|
113
113
|
for (const [name, value] of Object.entries(exports)) {
|
|
114
114
|
env.set(name, value);
|
|
115
115
|
}
|
|
116
116
|
}
|
|
117
|
+
// Also create a namespace object so `module.fn()` style access works
|
|
118
|
+
const nsName = stmt.path[stmt.path.length - 1];
|
|
119
|
+
const entries = new Map();
|
|
120
|
+
for (const [name, value] of Object.entries(exports)) {
|
|
121
|
+
entries.set(name, value);
|
|
122
|
+
}
|
|
123
|
+
env.set(nsName, { __map: true, entries });
|
|
117
124
|
}
|
|
118
125
|
/**
|
|
119
126
|
* Create a UseHandler bound to a specific file path.
|
package/dist/parser.js
CHANGED
|
@@ -518,7 +518,7 @@ export class Parser {
|
|
|
518
518
|
case TokenType.Slash:
|
|
519
519
|
case TokenType.Percent: return 6;
|
|
520
520
|
case TokenType.Power: return 7;
|
|
521
|
-
case TokenType.Range: return
|
|
521
|
+
case TokenType.Range: return 4;
|
|
522
522
|
case TokenType.Dot:
|
|
523
523
|
case TokenType.LBracket:
|
|
524
524
|
case TokenType.LParen:
|
package/dist/version.d.ts
CHANGED
package/dist/version.js
CHANGED