tensorgrad 0.0.15 → 0.0.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +154 -193
- package/dist/index.js +2208 -39
- package/dist/index.js.map +7 -1
- package/dist/worker.debug.js +553 -0
- package/package.json +60 -58
- package/src/adam.ts +69 -15
- package/src/compile.ts +334 -156
- package/src/index.ts +8 -4
- package/src/module.ts +72 -34
- package/src/worker-protocol.ts +183 -0
- package/src/worker-proxy.ts +76 -0
- package/src/worker.ts +281 -0
- package/dist/adam.js +0 -111
- package/dist/adam.js.map +0 -1
- package/dist/buffers.js +0 -120
- package/dist/buffers.js.map +0 -1
- package/dist/capture.js +0 -33
- package/dist/capture.js.map +0 -1
- package/dist/codegen.js +0 -724
- package/dist/codegen.js.map +0 -1
- package/dist/compile.js +0 -184
- package/dist/compile.js.map +0 -1
- package/dist/grad.js +0 -380
- package/dist/grad.js.map +0 -1
- package/dist/ir.js +0 -60
- package/dist/ir.js.map +0 -1
- package/dist/module.js +0 -155
- package/dist/module.js.map +0 -1
- package/dist/nn.js +0 -135
- package/dist/nn.js.map +0 -1
- package/dist/ops.js +0 -326
- package/dist/ops.js.map +0 -1
- package/dist/runtime.js +0 -402
- package/dist/runtime.js.map +0 -1
- package/dist/shape.js +0 -259
- package/dist/shape.js.map +0 -1
- package/dist/trace.js +0 -100
- package/dist/trace.js.map +0 -1
package/dist/codegen.js.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"codegen.js","sourceRoot":"","sources":["../src/codegen.ts"],"names":[],"mappings":"…"}
package/dist/compile.js
DELETED
@@ -1,184 +0,0 @@
-// Top-level compile(): trace → autograd → buffer plan → codegen → runtime.
-//
-// Two entry points:
-//  * `compile(traceFn)` — low-level. User declares params via
-//    paramInput() inside the trace.
-//  * `compileModule(model, …)` — high-level. User defines the model as a
-//    Module tree; the library auto-discovers
-//    params, traces the forward, appends grad
-//    and Adam, and returns a runtime.
-import { trace, tensorInput } from './trace.js';
-import { appendGrad } from './grad.js';
-import { appendAdam } from './adam.js';
-import { planBuffers } from './buffers.js';
-import { emitKernels } from './codegen.js';
-import { createRuntime, createForwardRuntime } from './runtime.js';
-import { Module, materializeParams } from './module.js';
-/** Trace + autograd + buffer-plan + codegen, without touching WebGPU. */
-export function compileToIR(traceFn) {
-    const graph = trace(traceFn);
-    const { paramGrads, loss } = appendGrad(graph);
-    const plan = planBuffers(graph, paramGrads);
-    const kernels = emitKernels(graph, plan);
-    return { graph, paramGrads, loss, plan, kernels };
-}
-/** Full compile pipeline. Browser-only because it creates a GPUDevice. */
-export async function compile(traceFn, opts = {}) {
-    const ir = compileToIR(traceFn);
-    const lossBufferId = ir.plan.tensorToBuffer.get(ir.loss.id);
-    const runtime = await createRuntime(ir.plan, ir.kernels, lossBufferId, opts);
-    return Object.assign(runtime, { ir });
-}
-/**
- * Compile a Module-based model. Pass a *factory* `() => new Model()`, not the
- * model instance itself: compilation mutates the tree (every `ParamSentinel`
- * field becomes a real `Tensor`), so the instance is consumed and shouldn't be
- * referenced afterwards. Re-call the factory if you need a fresh tree.
- *
- * The forward function takes the materialized model and a Record of named
- * input tensors, returns the loss tensor. Inputs are matched by name with the
- * `inputs:` declaration:
- *
- *   inputs: {
- *     tokens: { shape: [B, T], dtype: 'i32' },
- *     targets: { shape: [B, T], dtype: 'i32' },
- *   }
- *   forward: (m, { tokens, targets }) => …
- *
- * Walks the module tree to materialize params with auto-derived names, then
- * runs trace → grad → adam → buffer plan → codegen → runtime. Initial
- * parameter values are uploaded automatically before this function returns;
- * call `reset()` later to re-randomize.
- *
- * If `opts.adam` is set, the runtime's `step()` automatically tracks an
- * internal step count and injects the bias-corrected `lrt` scalar each call;
- * users don't need to provide it themselves.
- */
-export async function compileModule(modelFactory, forward, opts = {}) {
-    const { graph, materialized } = traceModule(modelFactory, forward, opts.inputs ?? {});
-    const { paramGrads, loss } = appendGrad(graph);
-    const adamResult = opts.adam
-        ? appendAdam(graph, paramGrads, materialized.tensors, opts.adam, materialized.decayFlags)
-        : undefined;
-    const plan = planBuffers(graph, paramGrads, adamResult?.writebacks ?? []);
-    const kernels = emitKernels(graph, plan);
-    const lossBufferId = plan.tensorToBuffer.get(loss.id);
-    const runtime = await createRuntime(plan, kernels, lossBufferId, opts);
-    if (adamResult)
-        wrapStepForAdam(runtime, adamResult);
-    uploadInitialParams(plan, materialized.initFns, runtime, /* sharedParams */ undefined);
-    const ir = { graph, paramGrads, loss, plan, kernels };
-    const kernelCount = countKernels(kernels);
-    const reset = () => {
-        uploadInitialParams(plan, materialized.initFns, runtime, undefined);
-        runtime.resetOptimizerState();
-    };
-    const compileForwardMethod = (forwardFn, fOpts = {}) => compileForward(modelFactory, forwardFn, {
-        ...fOpts,
-        device: runtime.device,
-        sharedParams: runtime.params,
-    });
-    return Object.assign(runtime, { ir, kernelCount, reset, compileForward: compileForwardMethod });
-}
-// ============================================================================
-// Forward-only compile
-// ============================================================================
-/**
- * Compile a Module-based model in forward-only mode (no autograd, no Adam).
- * The forward function returns the output tensor (e.g., logits) instead of a
- * scalar loss; runtime exposes `run(inputs)` returning the full output as a
- * `Float32Array`.
- *
- * **Prefer the `compileForward` method on a training runtime** when both
- * graphs use the same Module class — it auto-supplies `device` and
- * `sharedParams`. This standalone form is for forward-only models with no
- * training graph at all, or for sharing params across a different model.
- *
- * **Sharing params with a training compile.** Pass `opts.sharedParams =
- * trainCompiled.params` to bind this graph's param buffers to an existing
- * training runtime's GPU buffers — every train step is then immediately
- * visible to `run()` calls here, no copies.
- *
- * Initial param values are uploaded automatically for params *not* covered
- * by `sharedParams` (those are owned by the sibling compile).
- */
-export async function compileForward(modelFactory, forward, opts = {}) {
-    const { graph, materialized } = traceModule(modelFactory, forward, opts.inputs ?? {});
-    const outputTensor = graph.tensors[graph.outputs[0]];
-    const plan = planBuffers(graph, /* paramGrads */ {});
-    const kernels = emitKernels(graph, plan);
-    const outputBufferId = plan.tensorToBuffer.get(outputTensor.id);
-    const runtime = await createForwardRuntime(plan, kernels, outputBufferId, opts);
-    uploadInitialParams(plan, materialized.initFns, runtime, opts.sharedParams);
-    const ir = { graph, paramGrads: {}, loss: outputTensor, plan, kernels };
-    return Object.assign(runtime, { ir, kernelCount: countKernels(kernels) });
-}
-/** Trace the forward function with a fresh model + tensor inputs and capture
- * the materialized params. Shared by both compile entry points; everything
- * past this point (grad/adam/buffer plan/runtime) diverges. */
-function traceModule(modelFactory, forward, inputDecls) {
-    const model = modelFactory();
-    let materialized = { tensors: {}, initFns: {}, decayFlags: {} };
-    const graph = trace(() => {
-        materialized = materializeParams(model);
-        const inputTensors = {};
-        for (const [name, decl] of Object.entries(inputDecls)) {
-            inputTensors[name] = tensorInput(name, decl.shape, decl.dtype ?? 'f32');
-        }
-        return forward(model, inputTensors);
-    });
-    return { graph, materialized };
-}
-const countKernels = (kernels) => kernels.filter(k => k.wgsl).length;
-/** Wrap the runtime's step() to inject Adam's per-step `lrt` (bias-corrected
- * effective LR) and, when the user supplied a per-step lr schedule, the
- * decayShrink scalar. Also wraps resetOptimizerState() so a reset zeros
- * Adam's m/v *and* the bias-correction step counter — otherwise the next
- * step would skip Adam's warmup phase. */
-function wrapStepForAdam(runtime, adamResult) {
-    const { lrtInputName, decayShrinkInputName, config } = adamResult;
-    let t = 0;
-    const lrtBuf = new Float32Array(1);
-    const decayShrinkBuf = decayShrinkInputName ? new Float32Array(1) : null;
-    const innerStep = runtime.step.bind(runtime);
-    const innerReset = runtime.resetOptimizerState.bind(runtime);
-    const wrappedStep = ((inputs, opts) => {
-        t++;
-        const lrNow = config.lr(t);
-        lrtBuf[0] = lrNow * Math.sqrt(1 - Math.pow(config.b2, t)) / (1 - Math.pow(config.b1, t));
-        const merged = { ...inputs, [lrtInputName]: lrtBuf };
-        if (decayShrinkBuf && decayShrinkInputName) {
-            decayShrinkBuf[0] = 1 - lrNow * config.weightDecay;
-            merged[decayShrinkInputName] = decayShrinkBuf;
-        }
-        if (opts?.readLoss === false)
-            return innerStep(merged, { readLoss: false });
-        if (opts?.withCaptures)
-            return innerStep(merged, { withCaptures: true });
-        return innerStep(merged);
-    });
-    runtime.step = wrappedStep;
-    runtime.resetOptimizerState = () => {
-        t = 0;
-        innerReset();
-    };
-}
-/** Build a Record<paramName, Float32Array> by running each param's init
- * function against its shape and uploading them to the runtime. Skips any
- * param covered by `sharedParams` (those are owned by a sibling compile). */
-function uploadInitialParams(plan, initFns, runtime, sharedParams) {
-    const out = {};
-    for (const [name, bufId] of plan.paramsByName) {
-        if (sharedParams?.has(name))
-            continue;
-        const shape = plan.buffers[bufId].shape;
-        const size = shape.reduce((a, b) => a * b, 1);
-        const initFn = initFns[name];
-        if (!initFn)
-            throw new Error(`compile: no init for param '${name}'`);
-        out[name] = initFn(size, shape);
-    }
-    if (Object.keys(out).length > 0)
-        runtime.uploadParams(out, { partial: !!sharedParams });
-}
-//# sourceMappingURL=compile.js.map
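
The doc comment on `compileModule` above pins down its calling convention: a model factory (tracing consumes the instance), named `inputs:` declarations matched by name, a `forward` that returns a rank-0 loss, and an optional `adam` config whose bias-corrected `lrt` scalar is injected into every `step()` call. A minimal TypeScript sketch of that contract, reconstructed from those comments; `TinyModel`, its `loss()` method, and the batch arrays are hypothetical stand-ins, not the package's verbatim API:

// Sketch only: TinyModel, loss(), tokenData and targetData are assumed.
// The factory/inputs/forward/adam shapes follow the doc comments above.
import { compileModule } from 'tensorgrad'

const B = 8, T = 64
const compiled = await compileModule(
  () => new TinyModel(),                               // factory: the traced instance is consumed
  (m, { tokens, targets }) => m.loss(tokens, targets), // must return a rank-0 loss tensor
  {
    inputs: {
      tokens:  { shape: [B, T], dtype: 'i32' },
      targets: { shape: [B, T], dtype: 'i32' },
    },
    adam: { lr: () => 3e-4, b1: 0.9, b2: 0.999, weightDecay: 0.0 },
  },
)

// step() merges lrt = lr(t) * sqrt(1 - b2^t) / (1 - b1^t) into the inputs
// automatically (see wrapStepForAdam above); callers never pass it.
const loss = await compiled.step({ tokens: tokenData, targets: targetData })
compiled.reset() // re-randomize params, zero Adam m/v and the step counter
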
package/dist/compile.js.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"compile.js","sourceRoot":"","sources":["../src/compile.ts"],"names":[],"mappings":"…"}
package/dist/grad.js
DELETED
@@ -1,380 +0,0 @@
-// Reverse-mode autograd over a traced Graph.
-//
-// Given a graph that ends in a scalar loss tensor, this module walks the ops
-// in reverse and appends backward ops to the same graph, computing dL/dT for
-// every Tensor T that descends from a `param_input`. The final cotangents on
-// the param_input tensors are the parameter gradients.
-//
-// Cotangent accumulation: a tensor with multiple consumers ends up with
-// contributions from each. We add them as we encounter them, so by the time
-// reverse iteration reaches a tensor's producer op, its cotangent is complete.
-//
-// Why this works as "more graph nodes": the transpose rule for an op like
-// mul(a, b)→c is `da += dc * b; db += dc * a`. The right-hand sides are
-// expressible in terms of existing forward ops (mul) plus accumulation (add).
-// We just call those op functions, which append nodes to the current graph
-// because we run inside an active trace context.
-import { add, sub, mul, div, mulScalar, matmul, matmulBatched, transpose, swapAxes, reshape, exp, broadcastTo, sumToShape, constScalar, reluGrad, sumLast, where, } from './ops.js';
-import { traceInto } from './trace.js';
-import { shapesEqual } from './shape.js';
-// `appendGrad(graph)` augments `graph` (which must have already been built by
-// `trace(...)` and must have a single scalar output = the loss) with backward
-// ops. Returns gradients for every param_input.
-//
-// Internally re-enters the graph as the active trace context, so backward ops
-// emitted by transpose rules append to it. The caller doesn't need to manage
-// trace state.
-export function appendGrad(graph) {
-    if (graph.outputs.length !== 1) {
-        throw new Error(`autograd: expected graph with exactly 1 output (the loss); got ${graph.outputs.length}`);
-    }
-    const lossId = graph.outputs[0];
-    const lossTensor = graph.tensors[lossId];
-    if (lossTensor.shape.length !== 0) {
-        throw new Error(`autograd: loss must be a rank-0 scalar; got shape [${lossTensor.shape.join(', ')}]. ` +
-            `Reduce with sumLast / mulScalar to a scalar before calling appendGrad.`);
-    }
-    // Snapshot the forward portion of the graph before we start emitting backward
-    // ops, so the reverse walk only iterates over forward ops.
-    const forwardOpCount = graph.ops.length;
-    const forwardOps = graph.ops.slice(0, forwardOpCount);
-    // cotangents: tensorId -> the Tensor representing dL/dTensor in the graph.
-    const cotangents = new Map();
-    return traceInto(graph, () => {
-        // Seed: dL/dLoss = 1.0
-        cotangents.set(lossId, constScalar(1.0, 'f32'));
-        // Reverse walk.
-        for (let i = forwardOpCount - 1; i >= 0; i--) {
-            const op = forwardOps[i];
-            const outCotan = cotangents.get(op.out);
-            if (!outCotan)
-                continue;
-            runTransposeRule(op, outCotan, graph, cotangents);
-        }
-        // Collect param gradients by name. Skip non-param leaves.
-        const paramGrads = {};
-        for (const op of forwardOps) {
-            if (op.kind !== 'param_input')
-                continue;
-            // (state_input and tensor_input don't produce gradients we hand back.)
-            const cotan = cotangents.get(op.out);
-            if (!cotan) {
-                // No path from this param to the loss — emit explicit zeros so the
-                // caller gets a tensor with the right shape.
-                const t = graph.tensors[op.out];
-                paramGrads[op.name] = broadcastTo(constScalar(0.0, t.dtype), t.shape);
-            }
-            else {
-                paramGrads[op.name] = cotan;
-            }
-        }
-        return { graph, paramGrads, loss: lossTensor };
-    });
-}
-// ============================================================================
-// Cotangent accumulation
-// ============================================================================
-// Add `contribution` to the cotangent of tensor `inputId`. If a cotangent
-// already exists, sum them (multiple consumers); otherwise initialize.
-function accumulate(cotangents, inputId, contribution) {
-    const existing = cotangents.get(inputId);
-    if (existing) {
-        cotangents.set(inputId, add(existing, contribution));
-    }
-    else {
-        cotangents.set(inputId, contribution);
-    }
-}
-// Reduce a cotangent to match the input's shape, undoing any broadcast that
-// occurred during forward. If `fromShape == toShape`, no-op.
-function unbroadcast(cotan, toShape) {
-    if (shapesEqual(cotan.shape, toShape))
-        return cotan;
-    return sumToShape(cotan, toShape);
-}
-// ============================================================================
-// Transpose rules
-// ============================================================================
-//
-// One per OpNode kind. Each rule:
-//  * receives the forward op + its output cotangent
-//  * builds the backward expression(s) in graph terms (calling ops.ts functions)
-//  * accumulates cotangent contributions onto each input tensor
-function runTransposeRule(op, outCotan, graph, cotangents) {
-    const tensorOf = (id) => graph.tensors[id];
-    switch (op.kind) {
-        // ---- Leaves: no inputs to accumulate into. -----------------------------
-        case 'param_input':
-        case 'tensor_input':
-        case 'state_input':
-        case 'arange':
-        case 'const_scalar':
-            return;
-        // ---- Element-wise binops (with broadcast) ------------------------------
-        // c = a op b; reduce cotan back to each operand's shape.
-        case 'add': {
-            const a = tensorOf(op.a), b = tensorOf(op.b);
-            accumulate(cotangents, op.a, unbroadcast(outCotan, a.shape));
-            accumulate(cotangents, op.b, unbroadcast(outCotan, b.shape));
-            return;
-        }
-        case 'sub': {
-            const a = tensorOf(op.a), b = tensorOf(op.b);
-            accumulate(cotangents, op.a, unbroadcast(outCotan, a.shape));
-            accumulate(cotangents, op.b, unbroadcast(mulScalar(outCotan, -1), b.shape));
-            return;
-        }
-        case 'mul': {
-            const a = tensorOf(op.a), b = tensorOf(op.b);
-            // dC/dA = b ; dC/dB = a. Both are forward tensors still alive in the graph.
-            // We must NOT consume the forward tensors — they're referenced by id.
-            // The mul() helper allocates fresh tensors, so referencing a/b multiple
-            // times in different mul() calls is fine: we just emit fresh ops.
-            accumulate(cotangents, op.a, unbroadcast(mul(outCotan, b), a.shape));
-            accumulate(cotangents, op.b, unbroadcast(mul(outCotan, a), b.shape));
-            return;
-        }
-        case 'div': {
-            // c = a/b. dc/da = 1/b. dc/db = -a/b^2.
-            const a = tensorOf(op.a), b = tensorOf(op.b);
-            accumulate(cotangents, op.a, unbroadcast(div(outCotan, b), a.shape));
-            // -outCotan * a / (b*b)
-            const numer = mul(outCotan, a);
-            const bSq = mul(b, b);
-            accumulate(cotangents, op.b, unbroadcast(mulScalar(div(numer, bSq), -1), b.shape));
-            return;
-        }
-        // ---- Element-wise scalar binops (scalar is a JS number, not a tensor) -
-        case 'mul_scalar': {
-            // c = a * s. dc/da = s.
-            accumulate(cotangents, op.a, mulScalar(outCotan, op.scalar));
-            return;
-        }
-        case 'add_scalar': {
-            // c = a + s. dc/da = 1.
-            accumulate(cotangents, op.a, outCotan);
-            return;
-        }
-        // ---- Unary -------------------------------------------------------------
-        case 'sqrt': {
-            // c = sqrt(a). dc/da = 1/(2*sqrt(a)) = 1/(2*c).
-            const c = tensorOf(op.out);
-            accumulate(cotangents, op.a, mulScalar(div(outCotan, c), 0.5));
-            return;
-        }
-        case 'rsqrt': {
-            // c = a^(-0.5). dc/da = -0.5 * a^(-1.5) = -0.5 * c^3.
-            const c = tensorOf(op.out);
-            const c3 = mul(mul(c, c), c);
-            accumulate(cotangents, op.a, mulScalar(mul(outCotan, c3), -0.5));
-            return;
-        }
-        case 'log': {
-            // c = log(a). dc/da = 1/a.
-            const a = tensorOf(op.a);
-            accumulate(cotangents, op.a, div(outCotan, a));
-            return;
-        }
-        case 'exp': {
-            // c = exp(a). dc/da = exp(a) = c.
-            const c = tensorOf(op.out);
-            accumulate(cotangents, op.a, mul(outCotan, c));
-            return;
-        }
-        case 'relu': {
-            // c = relu(a). dc/da = (a > 0 ? 1 : 0). Use the fused relu_grad op.
-            const a = tensorOf(op.a);
-            accumulate(cotangents, op.a, reluGrad(a, outCotan));
-            return;
-        }
-        // ---- Reductions over last axis ---------------------------------------
-        case 'mean_last': {
-            // c[..., 1] = mean over last axis of a[..., D]. da[..., d] = dc[..., 0] / D.
-            // outCotan has shape [..., 1]; broadcast to a's shape and divide by D.
-            const a = tensorOf(op.a);
-            const D = a.shape[a.shape.length - 1];
-            const expanded = broadcastTo(outCotan, a.shape);
-            accumulate(cotangents, op.a, mulScalar(expanded, 1 / D));
-            return;
-        }
-        case 'sum_last': {
-            // c[...] = sum over last axis (keepdims=false). da[..., d] = dc[...].
-            // outCotan has rank one less than a; broadcast to a's shape (which inserts
-            // back the last axis with a's last-axis size).
-            const a = tensorOf(op.a);
-            // First reshape outCotan to add a trailing 1, then broadcast to a's shape.
-            const withKeep = reshape(outCotan, [...outCotan.shape, 1]);
-            accumulate(cotangents, op.a, broadcastTo(withKeep, a.shape));
-            return;
-        }
-        // ---- Shape ------------------------------------------------------------
-        case 'reshape': {
-            // c = reshape(a, ...). Backward: reshape outCotan back to a's shape.
-            const a = tensorOf(op.a);
-            accumulate(cotangents, op.a, reshape(outCotan, a.shape));
-            return;
-        }
-        case 'transpose': {
-            // c = transpose(a, perm). Backward: transpose outCotan with inverse perm.
-            const inv = invertPerm(op.perm);
-            accumulate(cotangents, op.a, transpose(outCotan, inv));
-            return;
-        }
-        // ---- Linear algebra ---------------------------------------------------
-        case 'matmul': {
-            // c = a @ b, where a: [..., M, K], b: [K, N], c: [..., M, N].
-            // dA = dC @ B^T (matmul, since b is unbatched)
-            // dB = sum_over_batch( A^T @ dC )
-            //
-            // Implementation note: dA uses the same `matmul` (a [...,M,N] · b [N,K])
-            // because b is rank-2. dB needs A^T which has shape [..., K, M], then
-            // matmul with dC ([..., M, N]) gives [..., K, N], which we sum over
-            // leading batch dims to get [K, N].
-            const a = tensorOf(op.a), b = tensorOf(op.b);
-            // dA = dC @ B^T
-            accumulate(cotangents, op.a, matmul(outCotan, swapAxes(b, -1, -2)));
-            // dB: per-batch A^T @ dC, then sum over batch dims.
-            // A is [..., M, K]; transpose last two axes.
-            const aT = swapAxes(a, -1, -2); // [..., K, M]
-            // matmul_batched needs same rank on both sides. dC has rank `a.rank`;
-            // aT has rank `a.rank`; use matmul_batched if rank > 2, else matmul.
-            let perBatchDb;
-            if (a.shape.length > 2) {
-                perBatchDb = matmulBatched(aT, outCotan); // [..., K, N]
-            }
-            else {
-                perBatchDb = matmul(aT, outCotan); // [K, N]
-            }
-            // Sum over leading batch dims to collapse to b's shape [K, N].
-            accumulate(cotangents, op.b, sumToShape(perBatchDb, b.shape));
-            return;
-        }
-        case 'matmul_batched': {
-            // c = a @ b, both [..., M, K] · [..., K, N] -> [..., M, N].
-            // dA = dC @ B^T (per-batch, all batch dims preserved)
-            // dB = A^T @ dC (per-batch)
-            const a = tensorOf(op.a), b = tensorOf(op.b);
-            accumulate(cotangents, op.a, matmulBatched(outCotan, swapAxes(b, -1, -2)));
-            accumulate(cotangents, op.b, matmulBatched(swapAxes(a, -1, -2), outCotan));
-            return;
-        }
-        // ---- Indexing / casting (no gradient through integer indices) --------
-        case 'one_hot':
-            // The output is float, but the input (indices) is integer-valued — no
-            // continuous gradient flows through it. Stop here.
-            return;
-        // ---- Slicing ---------------------------------------------------------
-        case 'slice_last_range': {
-            // c = a[..., start:end]. Backward: pad outCotan with zeros to a's shape.
-            // We construct this as: zeros at left, outCotan in middle, zeros at right,
-            // concatenated along the last axis. We don't have concat or generic pad
-            // ops; the simplest expression here is a sparse expansion via broadcasting
-            // and addition of zero tensors. For Phase 2 we punt: slice's autograd is
-            // implemented by emitting a single fused op that scatters the cotangent.
-            // For now: signal that slice's backward needs a dedicated op kind.
-            const a = tensorOf(op.a);
-            // Build a zeros tensor of a's shape, then add via... no, we can't do
-            // additive scatter without an index_put. Easiest path: add a dedicated
-            // backward op kind. For this pass, throw until we extend the IR.
-            throw new Error(`autograd: slice_last_range backward not implemented yet ` +
-                `(would need a scatter-style op or a Concat op). ` +
-                `Workaround for now: avoid taking gradients through slices by using ` +
-                `separate matmuls for Q/K/V instead of a fused W_qkv. ` +
-                `Tensor: ${a.shape} -> ${tensorOf(op.out).shape}`);
-        }
-        // ---- Broadcast / un-broadcast (autograd infrastructure) ---------------
-        case 'broadcast_to': {
-            // c = broadcast(a, target). da = sum_to_shape(dc, a.shape).
-            const a = tensorOf(op.a);
-            accumulate(cotangents, op.a, sumToShape(outCotan, a.shape));
-            return;
-        }
-        case 'sum_to_shape': {
-            // c = sum_to_shape(a, target). da = broadcast_to(dc, a.shape).
-            const a = tensorOf(op.a);
-            accumulate(cotangents, op.a, broadcastTo(outCotan, a.shape));
-            return;
-        }
-        // ---- ML primitives ---------------------------------------------------
-        case 'log_softmax_last': {
-            // c = log_softmax(a, axis=-1). softmax(a) = exp(c).
-            // dL/dA = dL/dC - softmax(a) * sum_last_keepdims(dL/dC)
-            const c = tensorOf(op.out);
-            const sm = exp(c); // softmax(a)
-            // sum_last with keepdims via reshape: sum_last drops the dim, then
-            // reshape to add a trailing 1 back, then broadcast multiplies.
-            const sumDc = sumLast(outCotan); // shape: [..., ] (rank-1 less)
-            const sumDcKeep = reshape(sumDc, [...sumDc.shape, 1]);
-            const term = mul(sm, broadcastTo(sumDcKeep, c.shape));
-            accumulate(cotangents, op.a, sub(outCotan, term));
-            return;
-        }
-        case 'softmax_causal_last': {
-            // c = softmax_causal(a, axis=-1). The causal mask zeros the upper triangle
-            // of c; for the backward, the same mask zeros out dx_upper because both
-            // paths through softmax depend on c-values that are 0 there.
-            // dL/dA = (dL/dC - sum_last_keep(dL/dC * c)) * c
-            const c = tensorOf(op.out);
-            const dcXc = mul(outCotan, c);
-            const s = sumLast(dcXc);
-            const sKeep = reshape(s, [...s.shape, 1]);
-            const inner = sub(outCotan, broadcastTo(sKeep, c.shape));
-            accumulate(cotangents, op.a, mul(inner, c));
-            return;
-        }
-        // ---- Comparisons + select ---------------------------------------------
-        case 'less':
-        case 'greater':
-            // No gradient flows through bool comparisons. Stop here.
-            return;
-        case 'where': {
-            // c = where(cond, a, b).
-            // dC flows to a where cond is true, to b where cond is false.
-            // Need broadcast-aware unreduction back to a's and b's original shapes.
-            const cond = tensorOf(op.cond);
-            const a = tensorOf(op.a);
-            const b = tensorOf(op.b);
-            // Build zero tensors via broadcasting a 0-d const scalar.
-            const zeroA = broadcastTo(constScalar(0, a.dtype), outCotan.shape);
-            const zeroB = broadcastTo(constScalar(0, b.dtype), outCotan.shape);
-            accumulate(cotangents, op.a, unbroadcast(where(cond, outCotan, zeroA), a.shape));
-            accumulate(cotangents, op.b, unbroadcast(where(cond, zeroB, outCotan), b.shape));
-            return;
-        }
-        case 'where_causal': {
-            // c = where(causal_mask, a, fillValue). Upper triangle becomes constant
-            // (no gradient); lower triangle passes a through. So da_lower = dc_lower,
-            // da_upper = 0. We can't easily express this with current ops; punt.
-            throw new Error(`autograd: where_causal backward not yet implemented. ` +
-                `Use softmax_causal_last (which fuses the mask + softmax) instead.`);
-        }
-        // ---- Adam ops are post-autograd; no backward through them. ----------
-        case 'adam_update_m':
-        case 'adam_update_v':
-        case 'adam_update_p':
-            throw new Error(`autograd: cannot differentiate through ${op.kind}`);
-        // ---- relu_grad has no further backward (autograd-internal) ----------
-        case 'relu_grad': {
-            // We don't double-differentiate. If someone tries, this will blow up —
-            // intentional. Phase 2 doesn't need 2nd-order gradients.
-            throw new Error(`autograd: cannot take second-order gradient through relu_grad. ` +
-                `Phase 2 does not support higher-order autodiff.`);
-        }
-        default: {
-            // Exhaustiveness check at type level.
-            const _exhaustive = op;
-            void _exhaustive;
-            throw new Error(`autograd: unhandled op kind ${op.kind}`);
-        }
-    }
-}
-// ============================================================================
-// Helpers
-// ============================================================================
-function invertPerm(perm) {
-    const inv = new Array(perm.length);
-    for (let i = 0; i < perm.length; i++)
-        inv[perm[i]] = i;
-    return inv;
-}
-//# sourceMappingURL=grad.js.map
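
The scheme grad.js describes, one transpose rule per op kind with contributions summed whenever a tensor has several consumers, is easy to verify on a toy graph. Below is a self-contained TypeScript sketch with plain numbers standing in for graph tensors; the Map-plus-accumulate shape mirrors the deleted code, while the op encoding and numeric evaluation are illustrative only:

// Reverse-mode walk over a two-op graph: t1 = x0 * x0; loss = t1 * 3.
// x0 feeds mul twice, so its cotangent receives two contributions.
type Id = number
type Op =
  | { kind: 'mul'; out: Id; a: Id; b: Id }
  | { kind: 'mul_scalar'; out: Id; a: Id; scalar: number }

const x0 = 2
const t1 = x0 * x0
const value = (id: Id) => [x0, t1, t1 * 3][id]    // forward values by tensor id
const ops: Op[] = [
  { kind: 'mul', out: 1, a: 0, b: 0 },
  { kind: 'mul_scalar', out: 2, a: 1, scalar: 3 },
]

const cotangents = new Map<Id, number>()
const accumulate = (id: Id, contribution: number) =>
  cotangents.set(id, (cotangents.get(id) ?? 0) + contribution)

cotangents.set(2, 1)                              // seed: dL/dLoss = 1
for (const op of [...ops].reverse()) {            // reverse walk over forward ops
  const dc = cotangents.get(op.out)
  if (dc === undefined) continue                  // op never reaches the loss
  if (op.kind === 'mul') {
    accumulate(op.a, dc * value(op.b))            // da += dc * b
    accumulate(op.b, dc * value(op.a))            // db += dc * a
  } else {
    accumulate(op.a, dc * op.scalar)              // da += dc * s
  }
}
console.log(cotangents.get(0))                    // 12 = d(3*x^2)/dx at x = 2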