klongpy 0.7.0__tar.gz → 0.7.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- klongpy-0.7.1/MANIFEST.in +5 -0
- {klongpy-0.7.0/klongpy.egg-info → klongpy-0.7.1}/PKG-INFO +146 -95
- {klongpy-0.7.0 → klongpy-0.7.1}/README.md +136 -87
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/__init__.py +0 -2
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/adverbs.py +84 -82
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/autograd.py +0 -9
- klongpy-0.7.1/klongpy/backend.py +38 -0
- klongpy-0.7.1/klongpy/backends/__init__.py +26 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/backends/base.py +154 -5
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/backends/numpy_backend.py +2 -1
- klongpy-0.7.1/klongpy/backends/registry.py +76 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/backends/torch_backend.py +83 -31
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/cli.py +50 -7
- klongpy-0.7.1/klongpy/core.py +113 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/db/sys_fn_db.py +3 -3
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/db/sys_fn_kvs.py +2 -4
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/dyads.py +203 -162
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/interpreter.py +32 -15
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/monads.py +99 -89
- klongpy-0.7.1/klongpy/parser.py +328 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/repl.py +2 -2
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/sys_fn.py +53 -15
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/sys_fn_ipc.py +4 -9
- klongpy-0.7.1/klongpy/types.py +503 -0
- klongpy-0.7.1/klongpy/writer.py +122 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/ws/sys_fn_ws.py +5 -8
- {klongpy-0.7.0 → klongpy-0.7.1/klongpy.egg-info}/PKG-INFO +146 -95
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy.egg-info/SOURCES.txt +6 -17
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy.egg-info/requires.txt +6 -4
- {klongpy-0.7.0 → klongpy-0.7.1}/pyproject.toml +9 -7
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_accel.py +15 -9
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_autograd_parametrized.py +32 -40
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_backend.py +134 -122
- klongpy-0.7.1/tests/test_backend_abstraction.py +31 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_cli_exit.py +5 -3
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_eval_monad_list.py +3 -4
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_extra_suite.py +31 -32
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_helpers.py +2 -2
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_interop.py +3 -3
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_kg_asarray.py +13 -15
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_kgtests.py +0 -2
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_known_bugs.py +7 -7
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_reshape_strings.py +5 -6
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_suite.py +5 -1
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_sys_fn.py +2 -3
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_sys_fn_coverage.py +2 -9
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_sys_fn_db.py +3 -4
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_sys_fn_kvs.py +3 -3
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_sys_fn_timer.py +1 -1
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_sys_fn_ws_coverage.py +2 -2
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_torch_backend.py +52 -47
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_util.py +20 -12
- klongpy-0.7.0/MANIFEST.in +0 -2
- klongpy-0.7.0/klongpy/backend.py +0 -171
- klongpy-0.7.0/klongpy/backends/__init__.py +0 -94
- klongpy-0.7.0/klongpy/core.py +0 -1094
- klongpy-0.7.0/tests/__init__.py +0 -6
- klongpy-0.7.0/tests/gen_join_over.py +0 -119
- klongpy-0.7.0/tests/gen_py_suite.py +0 -83
- klongpy-0.7.0/tests/gen_test_fn.py +0 -259
- klongpy-0.7.0/tests/perf_async.py +0 -25
- klongpy-0.7.0/tests/perf_avg.py +0 -18
- klongpy-0.7.0/tests/perf_duckdb.py +0 -32
- klongpy-0.7.0/tests/perf_gen.py +0 -38
- klongpy-0.7.0/tests/perf_ipc_overhead.py +0 -34
- klongpy-0.7.0/tests/perf_join.py +0 -53
- klongpy-0.7.0/tests/perf_load.py +0 -17
- klongpy-0.7.0/tests/perf_prog.py +0 -18
- klongpy-0.7.0/tests/perf_serdes.py +0 -52
- klongpy-0.7.0/tests/perf_sys_fn_db.py +0 -263
- klongpy-0.7.0/tests/perf_vector.py +0 -163
- klongpy-0.7.0/tests/utils.py +0 -254
- {klongpy-0.7.0 → klongpy-0.7.1}/LICENSE +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/db/__init__.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/db/df_cache.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/db/file_cache.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/db/helpers.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/lib/csv.kg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/lib/edt.kg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/lib/eigenv.kg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/lib/help.kg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/lib/huffman.kg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/lib/math.kg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/lib/nstat.kg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/lib/print.kg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/lib/set.kg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/lib/spline.kg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/lib/time.kg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/lib/util.kg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/sys_fn_autograd.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/sys_fn_timer.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/sys_var.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/utils.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/web/__init__.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/web/sys_fn_web.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy/ws/__init__.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy.egg-info/dependency_links.txt +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy.egg-info/entry_points.txt +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/klongpy.egg-info/top_level.txt +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/setup.cfg +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_core_fn_wrapper.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_df_cache.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_examples.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_file_cache.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_prog.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_suite_file.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_sys_fn_ipc.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_sys_fn_ipc_coverage.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_sys_fn_web.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_sys_fn_web_coverage.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_sys_fn_ws.py +0 -0
- {klongpy-0.7.0 → klongpy-0.7.1}/tests/test_utils_coverage.py +0 -0
````diff
--- klongpy-0.7.0/klongpy.egg-info/PKG-INFO
+++ klongpy-0.7.1/PKG-INFO
@@ -1,12 +1,12 @@
 Metadata-Version: 2.4
 Name: klongpy
-Version: 0.7.0
+Version: 0.7.1
 Summary: High-Performance Klong array language with rich Python integration.
 Author: Brian Guarraci
 License-Expression: MIT
-Project-URL: Homepage,
+Project-URL: Homepage, https://klongpy.org
 Project-URL: Repository, https://github.com/briangu/klongpy
-Project-URL: Documentation,
+Project-URL: Documentation, https://klongpy.org
 Keywords: klong,array,language,interpreter,numpy,torch
 Classifier: Development Status :: 4 - Beta
 Classifier: Intended Audience :: Developers
@@ -19,7 +19,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: Scientific/Engineering
 Classifier: Topic :: Software Development :: Interpreters
-Requires-Python: <3.
+Requires-Python: <3.15,>=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: numpy>=2.0
@@ -30,8 +30,8 @@ Requires-Dist: colorama==0.4.6; extra == "repl"
 Provides-Extra: web
 Requires-Dist: aiohttp==3.9.4; extra == "web"
 Provides-Extra: db
-Requires-Dist: pandas==
-Requires-Dist: duckdb==1.
+Requires-Dist: pandas==3.0.0; extra == "db"
+Requires-Dist: duckdb==1.4.4; extra == "db"
 Provides-Extra: ws
 Requires-Dist: websockets==12.0; extra == "ws"
 Provides-Extra: docs
@@ -41,8 +41,8 @@ Provides-Extra: all
 Requires-Dist: torch; extra == "all"
 Requires-Dist: colorama==0.4.6; extra == "all"
 Requires-Dist: aiohttp==3.9.4; extra == "all"
-Requires-Dist: pandas==
-Requires-Dist: duckdb==1.
+Requires-Dist: pandas==3.0.0; extra == "all"
+Requires-Dist: duckdb==1.4.4; extra == "all"
 Requires-Dist: websockets==12.0; extra == "all"
 Requires-Dist: mkdocs>=1.6; extra == "all"
 Requires-Dist: mkdocs-material>=9.7; extra == "all"
@@ -51,6 +51,8 @@ Requires-Dist: pytest>=7.0; extra == "dev"
 Requires-Dist: pytest-asyncio>=0.21; extra == "dev"
 Requires-Dist: mkdocs>=1.6; extra == "dev"
 Requires-Dist: mkdocs-material>=9.7; extra == "dev"
+Requires-Dist: build>=1.2; extra == "dev"
+Requires-Dist: twine>=5.1; extra == "dev"
 Dynamic: license-file
 
 
@@ -67,7 +69,14 @@ Dynamic: license-file
 
 # KlongPy: A High-Performance Array Language with Autograd
 
-KlongPy
+KlongPy is a Python adaptation of the [Klong](https://t3x.org/klong) [array language](https://en.wikipedia.org/wiki/Array_programming), offering high-performance vectorized operations. It prioritizes compatibility with Python, thus allowing seamless integration of Python's expansive ecosystem while retaining Klong's succinctness.
+
+KlongPy backends include [NumPy](https://numpy.org/) and optional [PyTorch](https://pytorch.org/) (CPU, CUDA, and Apple MPS).
+When PyTorch is enabled, automatic differentiation (autograd) is supported; otherwise, numeric differentiation is the default.
+
+Full documentation: [https://klongpy.org](https://klongpy.org)
+
+New to v0.7.0, KlongPy now brings gradient-based programming to an already-succinct array language, so you can differentiate compact array expressions directly. It's also a batteries-included system with IPC, DuckDB-backed database tooling, web/websocket support, and other integrations exposed seamlessly from the language.
 
 Backends include NumPy and optional PyTorch (CPU, CUDA, and Apple MPS). When PyTorch is enabled, gradients use autograd; otherwise numeric differentiation is the default.
 
````
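The rewritten intro leans heavily on Python interoperability. As a quick illustration for readers evaluating the change, here is a minimal embedding sketch; it assumes the `KlongInterpreter` entry point that klongpy exposes to Python, which is not itself part of this diff:

```python
# Minimal embedding sketch (assumes klongpy's KlongInterpreter API;
# that class is not touched by this diff).
from klongpy import KlongInterpreter

klong = KlongInterpreter()

# Evaluate Klong source from Python.
print(klong("1+2"))  # 3

# Share data both ways: set a variable from Python, reduce it in Klong.
klong["data"] = [1, 2, 3, 4, 5]
print(klong("+/data"))  # 15 (sum via the "over" adverb)
```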
````diff
@@ -86,18 +95,11 @@ print(x)  # ~0
 
 **KlongPy gradient descent (2 lines):**
 ```klong
-f::{x^2};
-{
-```
-
-**Or with custom optimizer (copy from examples/):**
-```klong
-.pyf("optimizers";"SGDOptimizer")
-x::5.0; opt::SGDOptimizer(klong;["x"];:{["lr" 0.1]})
-{opt({x^2})}'!100  :" x -> 0
+f::{x^2}; s::5.0
+{s::s-(0.1*f:>s)}'!100  :" s -> 0"
 ```
 
-
+Array languages like APL, K, and Q revolutionized finance by treating operations as data transformations, not loops. KlongPy brings this philosophy to machine learning: gradients become expressions you compose, not boilerplate you maintain. The result is a succint mathematical-like notation that is automatically extended to machine learning.
 
 ## Quick Install
 
````
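For non-Klong readers, the two-line gradient-descent hunk above unrolls to the following plain-Python sketch; the exact derivative `2*s` stands in for what `f:>s` computes:

```python
# Plain-Python rendering of the two-line Klong gradient descent above.
# For f(s) = s^2 the gradient is 2*s, so each step scales s by (1 - 2*lr) = 0.8.
def grad_f(s: float) -> float:
    return 2.0 * s  # what f:>s evaluates to for f::{x^2}

s, lr = 5.0, 0.1
for _ in range(100):  # Klong: {s::s-(0.1*f:>s)}'!100
    s -= lr * grad_f(s)
print(s)  # ~0: 5 * 0.8**100 is about 1e-9, matching the :" s -> 0" comment
```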
````diff
@@ -108,27 +110,41 @@ kgpy
 
 # Enable torch backend (autograd + GPU)
 pip install "klongpy[torch]"
-
+kgpy --backend torch
 
 # Everything (web, db, websockets, torch, repl)
 pip install "klongpy[all]"
 ```
 
+## REPL
+
+```bash
+$ kgpy
+Welcome to KlongPy REPL v0.7.0
+Author: Brian Guarraci
+Web: http://klongpy.org
+Backend: torch (mps)
+]h for help; Ctrl-D or ]q to quit
+
+$>
+```
+
 ## Why KlongPy?
 
 ### For Quants and Traders
 
-
+Optimize portfolios with gradients in a language designed for arrays:
 
 ```klong
-:"
-
-
-
-
-
-
-
+:" Portfolio optimization: gradient of Sharpe ratio"
+returns::[0.05 0.08 0.03 0.10]  :" Annual returns per asset"
+vols::[0.15 0.20 0.10 0.25]     :" Volatilities per asset"
+w::[0.25 0.25 0.25 0.25]        :" Portfolio weights"
+
+sharpe::{(+/x*returns)%((+/((x^2)*(vols^2)))^0.5)}
+sg::sharpe:>w  :" Gradient of Sharpe ratio"
+.d("sharpe gradient="); .p(sg)
+sharpe gradient=[0.07257738709449768 0.032256484031677246 0.11693036556243896 -0.22176480293273926]
 ```
 
 ### For ML Researchers
````
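The Sharpe gradient printed in the new example can be checked by hand. With the uncorrelated-asset simplification the snippet uses, the ratio and its gradient are:

```latex
S(w) = \frac{\sum_i w_i r_i}{\sigma_p}, \qquad
\sigma_p = \sqrt{\sum_i w_i^2 \sigma_i^2}, \qquad
\frac{\partial S}{\partial w_i} = \frac{r_i}{\sigma_p} - \frac{S\, w_i \sigma_i^2}{\sigma_p^2}.
```

With equal weights `w_i = 0.25` this gives `σ_p ≈ 0.0919` and `S ≈ 0.708`; for the first asset, `0.05/0.0919 - 0.708·0.25·0.15²/0.0919² ≈ 0.073`, matching the first component of the printed gradient.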
````diff
@@ -136,13 +152,18 @@ loss:>m  :" Gradient w.r.t. long window
 Neural networks in pure array notation:
 
 ```klong
-:" Single
+:" Single-layer neural network with gradient descent"
+.bkf(["exp"])
 sigmoid::{1%(1+exp(0-x))}
 forward::{sigmoid((w1*x)+b1)}
-
+X::[0.5 1.0 1.5 2.0]; Y::[0.2 0.4 0.6 0.8]
+w1::0.1; b1::0.1; lr::0.1
+loss::{+/((forward'X)-Y)^2}
 
-:" Train with multi-param gradients
+:" Train with multi-param gradients"
 {grads::loss:>[w1 b1]; w1::w1-(lr*grads@0); b1::b1-(lr*grads@1)}'!1000
+.d("w1="); .d(w1); .d(" b1="); .p(b1)
+w1=1.74 b1=-2.17
 ```
 
 ### For Scientists
@@ -150,9 +171,10 @@ loss::{+/(forward'X - Y)^2}
 Express mathematics directly:
 
 ```klong
-:" Gradient of f(x,y,z) = x^2 + y^2 + z^2 at [1,2,3]
+:" Gradient of f(x,y,z) = x^2 + y^2 + z^2 at [1,2,3]"
 f::{+/x^2}
-f:>[1 2 3]
+f:>[1 2 3]
+[2.0 4.0 6.0]
 ```
 
 ## The Array Language Advantage
@@ -174,9 +196,33 @@ KlongPy inherits from the [APL](https://en.wikipedia.org/wiki/APL_(programming_l
 
 ## Performance: NumPy vs PyTorch Backend
 
-The PyTorch backend provides significant speedups for large arrays with GPU acceleration:
+The PyTorch backend provides significant speedups for large arrays with GPU acceleration (RTX 4090 in this case):
+
+```
+$ python3 tests/perf_vector.py
+============================================================
+VECTOR OPS (element-wise, memory-bound)
+Size: 10,000,000 elements, Iterations: 100
+============================================================
+NumPy (baseline)        0.021854s
+KlongPy (numpy)         0.001413s  (15.46x vs NumPy)
+KlongPy (torch, cpu)    0.000029s  (761.22x vs NumPy)
+KlongPy (torch, cuda)   0.000028s  (784.04x vs NumPy)
+
+============================================================
+MATRIX MULTIPLY (compute-bound, GPU advantage)
+Size: 4000x4000, Iterations: 5
+============================================================
+NumPy (baseline)        0.078615s
+KlongPy (numpy)         0.075400s  (1.04x vs NumPy)
+KlongPy (torch, cpu)    0.077350s  (1.02x vs NumPy)
+KlongPy (torch, cuda)   0.002339s  (33.62x vs NumPy)
+```
+
+Also supporting Apple Silicon MPS (M1 Mac Studio) enables fast local work:
 
 ```
+$ python tests/perf_backend.py --compare
 Benchmark                 NumPy (ms)   Torch (ms)   Speedup
 ----------------------------------------------------------------------
 vector_add_1M             0.327        0.065        5.02x (torch)
@@ -186,11 +232,9 @@ grade_up_100K 0.588 0.199 2.96x (torch)
 enumerate_1M              0.141        0.050        2.83x (torch)
 ```
 
-*Benchmarks on Apple M1 with MPS. Run `python tests/perf_backend.py --compare` for your system.*
-
 ## Complete Feature Set
 
-KlongPy
+KlongPy is a batteries-included platform with kdb+/Q-inspired features:
 
 ### Core Language
 - **Vectorized Operations**: NumPy/PyTorch-powered bulk array operations
@@ -230,8 +274,8 @@ KlongPy uses Unicode operators for mathematical notation. Here's how to type the
 
 **Alternative:** Use the function equivalents that don't require special characters:
 ```klong
-3∇f  :" Using nabla
-.jacobian(f;x)  :" Instead of x∂f
+3∇f  :" Using nabla"
+.jacobian(f;x)  :" Instead of x∂f"
 ```
 
 ## Syntax Cheat Sheet
@@ -239,39 +283,40 @@
 Functions take up to 3 parameters, always named `x`, `y`, `z`:
 
 ```klong
-:" Operators (right to left evaluation)
-5+3*2  :" 11 (3*2 first, then +5)
-+/[1 2 3]  :" 6 (sum: + over /)
-*/[1 2 3]  :" 6 (product: * over /)
-#[1 2 3]  :" 3 (length)
-
-&
-
-:" Functions
-avg::{(+/x)%#x}  :" Monad (1 arg)
-dot::{+/x*y}  :" Dyad (2 args)
-clip::{x|y&z}
-
-:" Adverbs (modifiers)
-f
-1 2 3
-+/[1 2 3]  :" Over: fold/reduce
-+\[1 2 3]  :" Scan: running fold [1 3 6]
-
-:" Autograd
+:" Operators (right to left evaluation)"
+5+3*2  :" 11 (3*2 first, then +5)"
++/[1 2 3]  :" 6 (sum: + over /)"
+*/[1 2 3]  :" 6 (product: * over /)"
+#[1 2 3]  :" 3 (length)"
+3|5  :" 5 (max)"
+3&5  :" 3 (min)"
+
+:" Functions"
+avg::{(+/x)%#x}  :" Monad (1 arg)"
+dot::{+/x*y}  :" Dyad (2 args)"
+clip::{(x|y)&z}  :" Triad (3 args): min(max(x,y),z)"
+
+:" Adverbs (modifiers)"
+f::{x^2}
+f'[1 2 3]  :" Each: apply f to each -> [1 4 9]"
++/[1 2 3]  :" Over: fold/reduce -> 6"
++\[1 2 3]  :" Scan: running fold -> [1 3 6]"
+
+:" Autograd"
 f::{x^2}
-3∇f  :" Numeric gradient at x=3 -> ~6.0
-f:>3  :" Autograd (exact with torch) at x=3 -> 6.0
-f
+3∇f  :" Numeric gradient at x=3 -> ~6.0"
+f:>3  :" Autograd (exact with torch) at x=3 -> 6.0"
+f::{+/x^2}  :" Redefine f as sum-of-squares"
+f:>[1 2 3]  :" Gradient -> [2 4 6]"
 
-:" Multi-parameter gradients
+:" Multi-parameter gradients"
 w::2.0; b::3.0
 loss::{(w^2)+(b^2)}
-loss:>[w b]  :" Gradients for both -> [4.0 6.0]
+loss:>[w b]  :" Gradients for both -> [4.0 6.0]"
 
-:" Jacobian (for vector functions)
-g::{x^2}  :" Element-wise square
-[1 2]∂g  :" Jacobian matrix -> [[2 0] [0 4]]
+:" Jacobian (for vector functions)"
+g::{x^2}  :" Element-wise square"
+[1 2]∂g  :" Jacobian matrix -> [[2 0] [0 4]]"
 ```
 
 ## Examples
````
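The autograd rows of the cheat sheet are also reachable from host Python. A sketch, again assuming the `KlongInterpreter` embedding API; under the torch backend `:>` is exact autograd, while under NumPy it falls back to numeric differentiation:

```python
# Driving the :> gradient operator from Python (sketch; assumes the
# KlongInterpreter embedding API shown earlier, not part of this diff).
from klongpy import KlongInterpreter

klong = KlongInterpreter()
klong("f::{+/x^2}")         # f(x) = sum of squares
grad = klong("f:>[1 2 3]")  # gradient, expected [2 4 6]
print(grad)
```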
````diff
@@ -281,13 +326,13 @@ g::{x^2}  :" Element-wise square
 ```klong
 ?> a::[1 2 3 4 5]
 [1 2 3 4 5]
-?> a*a  :" Element-wise square
+?> a*a  :" Element-wise square"
 [1 4 9 16 25]
-?> +/a  :" Sum
+?> +/a  :" Sum"
 15
-?> (*/a)  :" Product
+?> (*/a)  :" Product"
 120
-?> avg::{(+/x)%#x}  :" Define average
+?> avg::{(+/x)%#x}  :" Define average"
 :monad
 ?> avg(a)
 3.0
@@ -295,43 +340,49 @@
 
 ### 2. Gradient Descent
 
+Minimize f(x) = (x-3)^2
+
+(with PyTorch's autograd)
 ```klong
-
+$ rlwrap kgpy --backend torch
 ?> f::{(x-3)^2}
 :monad
-?>
+?> s::10.0; lr::0.1
 0.1
-?> {
-[8.
+?> {s::s-(lr*f:>s); s}'!10
+[8.600000381469727 7.4800004959106445 6.584000587463379 5.8672003746032715 5.293760299682617 4.835008144378662 4.468006610870361 4.174405097961426 3.9395241737365723 3.751619338989258]
+```
+
+(Numerical differentiation)
+```klong
+$ rlwrap kgpy
+?> f::{(x-3)^2}
+:monad
+?> s::10.0; lr::0.1
+0.1
+?> {s::s-(lr*f:>s); s}'!10
+[8.60000000104776 7.480000001637279 6.584000001220716 5.867200000887465 5.2937600006031005 4.835008000393373 4.4680064002611175 4.174405120173077 3.939524096109306 3.7516192768605094]
 ```
 
 ### 3. Linear Regression
 
 ```klong
-:" Data: y = 2*x + 3 + noise
+:" Data: y = 2*x + 3 + noise"
 X::[1 2 3 4 5]
 Y::[5.1 6.9 9.2 10.8 13.1]
 
-:" Model parameters
+:" Model parameters"
 w::0.0; b::0.0
 
-:" Loss function
+:" Loss function"
 mse::{(+/(((w*X)+b)-Y)^2)%#X}
 
-:" Train with multi-parameter gradients
+:" Train with multi-parameter gradients"
 lr::0.01
 {grads::mse:>[w b]; w::w-(lr*grads@0); b::b-(lr*grads@1)}'!1000
 
 .d("Learned: w="); .d(w); .d(" b="); .p(b)
-
-```
-
-**Or with custom optimizer (copy from examples/autograd/optimizers.py):**
-```klong
-.pyf("optimizers";"AdamOptimizer")
-w::0.0; b::0.0
-opt::AdamOptimizer(klong;["w" "b"];:{["lr" 0.01]})
-{opt(mse)}'!1000  :" Optimizer handles gradient computation
+Learned: w=2.01 b=2.97
 ```
 
 ### 4. Database Operations
````
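As a sanity check on the values printed in the hunk above, the closed-form least-squares fit for this data is:

```latex
\hat w = \frac{\sum_i (x_i-\bar x)(y_i-\bar y)}{\sum_i (x_i-\bar x)^2}
       = \frac{19.9}{10} = 1.99, \qquad
\hat b = \bar y - \hat w\,\bar x = 9.02 - 1.99 \cdot 3 \approx 3.05,
```

so after 1000 gradient steps the reported `w=2.01 b=2.97` sits close to the optimum of the convex MSE loss, as expected.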
````diff
@@ -362,11 +413,11 @@ Carol 35
 
 **Client:**
 ```klong
-?> f::.cli(8888)  :" Connect to server
+?> f::.cli(8888)  :" Connect to server"
 remote[localhost:8888]:fn
-?> myavg::f(:avg)  :" Get remote function reference
+?> myavg::f(:avg)  :" Get remote function reference"
 remote[localhost:8888]:fn:avg:monad
-?> myavg(!1000000)  :" Execute on server
+?> myavg(!1000000)  :" Execute on server"
 499999.5
 ```
 
@@ -402,7 +453,7 @@ pip install "klongpy[repl]"
 ### With PyTorch Autograd (Recommended)
 ```bash
 pip install "klongpy[torch]"
-
+kgpy --backend torch  # Enable torch backend
 ```
 
 ### Web / DB / WebSockets Extras
@@ -428,7 +479,7 @@ KlongPy stands on the shoulders of giants:
 - **[NumPy](https://numpy.org/)**: The "Iverson Ghost" in Python's scientific stack
 - **[PyTorch](https://pytorch.org/)**: Automatic differentiation and GPU acceleration
 
-KlongPy combines Klong's simplicity with Python's ecosystem and PyTorch's autograd
+KlongPy combines Klong's simplicity with Python's ecosystem and PyTorch's autograd creating something new: an array language where gradients are first-class citizens.
 
 ## Use Cases
 
@@ -466,7 +517,7 @@ python3 -m pytest tests/  # Run tests
 
 This project does not accept direct issue submissions.
 
-Please start with a GitHub Discussion.
+Please start with a GitHub Discussion.
 Maintainers will promote validated discussions to Issues.
 
 Active contributors may be invited to open issues directly.
````