klongpy 0.6.9__tar.gz → 0.7.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- klongpy-0.7.0/PKG-INFO +493 -0
- klongpy-0.7.0/README.md +438 -0
- klongpy-0.7.0/klongpy/__init__.py +20 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/adverbs.py +5 -5
- klongpy-0.7.0/klongpy/autograd.py +308 -0
- klongpy-0.7.0/klongpy/backend.py +171 -0
- klongpy-0.7.0/klongpy/backends/__init__.py +94 -0
- klongpy-0.7.0/klongpy/backends/base.py +320 -0
- klongpy-0.7.0/klongpy/backends/numpy_backend.py +122 -0
- klongpy-0.7.0/klongpy/backends/torch_backend.py +995 -0
- klongpy-0.6.9/scripts/kgpy → klongpy-0.7.0/klongpy/cli.py +65 -88
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/core.py +228 -108
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/db/sys_fn_db.py +4 -3
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/dyads.py +159 -28
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/interpreter.py +31 -3
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/monads.py +39 -3
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/repl.py +21 -3
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/sys_fn.py +128 -17
- klongpy-0.7.0/klongpy/sys_fn_autograd.py +290 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/sys_fn_ipc.py +18 -6
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/sys_fn_timer.py +13 -3
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/web/sys_fn_web.py +14 -4
- klongpy-0.7.0/klongpy.egg-info/PKG-INFO +493 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy.egg-info/SOURCES.txt +22 -3
- klongpy-0.7.0/klongpy.egg-info/entry_points.txt +2 -0
- klongpy-0.7.0/klongpy.egg-info/requires.txt +37 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy.egg-info/top_level.txt +0 -1
- klongpy-0.7.0/pyproject.toml +81 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/gen_py_suite.py +8 -2
- klongpy-0.7.0/tests/perf_vector.py +163 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_accel.py +2 -1
- klongpy-0.7.0/tests/test_autograd_parametrized.py +563 -0
- klongpy-0.7.0/tests/test_backend.py +393 -0
- klongpy-0.7.0/tests/test_cli_exit.py +47 -0
- klongpy-0.7.0/tests/test_core_fn_wrapper.py +48 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_eval_monad_list.py +4 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_examples.py +2 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_extra_suite.py +35 -5
- klongpy-0.7.0/tests/test_helpers.py +103 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_interop.py +9 -1
- klongpy-0.7.0/tests/test_kg_asarray.py +128 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_kgtests.py +3 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_prog.py +3 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_reshape_strings.py +2 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_suite.py +25 -3
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_suite_file.py +10 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_sys_fn.py +7 -1
- klongpy-0.7.0/tests/test_sys_fn_coverage.py +307 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_sys_fn_db.py +4 -1
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_sys_fn_ipc.py +54 -0
- klongpy-0.7.0/tests/test_sys_fn_ipc_coverage.py +151 -0
- klongpy-0.7.0/tests/test_sys_fn_kvs.py +161 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_sys_fn_timer.py +51 -0
- klongpy-0.7.0/tests/test_sys_fn_web.py +86 -0
- klongpy-0.7.0/tests/test_sys_fn_web_coverage.py +69 -0
- klongpy-0.7.0/tests/test_sys_fn_ws.py +54 -0
- klongpy-0.7.0/tests/test_sys_fn_ws_coverage.py +184 -0
- klongpy-0.7.0/tests/test_torch_backend.py +318 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_util.py +15 -5
- klongpy-0.7.0/tests/test_utils_coverage.py +82 -0
- klongpy-0.7.0/tests/utils.py +254 -0
- klongpy-0.6.9/PKG-INFO +0 -448
- klongpy-0.6.9/README.md +0 -394
- klongpy-0.6.9/klongpy/__init__.py +0 -2
- klongpy-0.6.9/klongpy/backend.py +0 -103
- klongpy-0.6.9/klongpy.egg-info/PKG-INFO +0 -448
- klongpy-0.6.9/klongpy.egg-info/not-zip-safe +0 -1
- klongpy-0.6.9/klongpy.egg-info/requires.txt +0 -45
- klongpy-0.6.9/setup.py +0 -47
- klongpy-0.6.9/tests/perf_vector.py +0 -40
- klongpy-0.6.9/tests/test_kg_asarray.py +0 -94
- klongpy-0.6.9/tests/test_sys_fn_web.py +0 -50
- klongpy-0.6.9/tests/utils.py +0 -126
- {klongpy-0.6.9 → klongpy-0.7.0}/LICENSE +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/MANIFEST.in +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/db/__init__.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/db/df_cache.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/db/file_cache.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/db/helpers.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/db/sys_fn_kvs.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/lib/csv.kg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/lib/edt.kg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/lib/eigenv.kg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/lib/help.kg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/lib/huffman.kg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/lib/math.kg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/lib/nstat.kg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/lib/print.kg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/lib/set.kg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/lib/spline.kg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/lib/time.kg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/lib/util.kg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/sys_var.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/utils.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/web/__init__.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/ws/__init__.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy/ws/sys_fn_ws.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/klongpy.egg-info/dependency_links.txt +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/setup.cfg +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/__init__.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/gen_join_over.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/gen_test_fn.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/perf_async.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/perf_avg.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/perf_duckdb.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/perf_gen.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/perf_ipc_overhead.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/perf_join.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/perf_load.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/perf_prog.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/perf_serdes.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/perf_sys_fn_db.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_df_cache.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_file_cache.py +0 -0
- {klongpy-0.6.9 → klongpy-0.7.0}/tests/test_known_bugs.py +0 -0
klongpy-0.7.0/PKG-INFO
ADDED
|
@@ -0,0 +1,493 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: klongpy
|
|
3
|
+
Version: 0.7.0
|
|
4
|
+
Summary: High-Performance Klong array language with rich Python integration.
|
|
5
|
+
Author: Brian Guarraci
|
|
6
|
+
License-Expression: MIT
|
|
7
|
+
Project-URL: Homepage, http://klongpy.org
|
|
8
|
+
Project-URL: Repository, https://github.com/briangu/klongpy
|
|
9
|
+
Project-URL: Documentation, http://klongpy.org
|
|
10
|
+
Keywords: klong,array,language,interpreter,numpy,torch
|
|
11
|
+
Classifier: Development Status :: 4 - Beta
|
|
12
|
+
Classifier: Intended Audience :: Developers
|
|
13
|
+
Classifier: Intended Audience :: Science/Research
|
|
14
|
+
Classifier: Operating System :: OS Independent
|
|
15
|
+
Classifier: Programming Language :: Python :: 3
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
20
|
+
Classifier: Topic :: Scientific/Engineering
|
|
21
|
+
Classifier: Topic :: Software Development :: Interpreters
|
|
22
|
+
Requires-Python: <3.13,>=3.9
|
|
23
|
+
Description-Content-Type: text/markdown
|
|
24
|
+
License-File: LICENSE
|
|
25
|
+
Requires-Dist: numpy>=2.0
|
|
26
|
+
Provides-Extra: torch
|
|
27
|
+
Requires-Dist: torch; extra == "torch"
|
|
28
|
+
Provides-Extra: repl
|
|
29
|
+
Requires-Dist: colorama==0.4.6; extra == "repl"
|
|
30
|
+
Provides-Extra: web
|
|
31
|
+
Requires-Dist: aiohttp==3.9.4; extra == "web"
|
|
32
|
+
Provides-Extra: db
|
|
33
|
+
Requires-Dist: pandas==2.2.2; extra == "db"
|
|
34
|
+
Requires-Dist: duckdb==1.3.0; extra == "db"
|
|
35
|
+
Provides-Extra: ws
|
|
36
|
+
Requires-Dist: websockets==12.0; extra == "ws"
|
|
37
|
+
Provides-Extra: docs
|
|
38
|
+
Requires-Dist: mkdocs>=1.6; extra == "docs"
|
|
39
|
+
Requires-Dist: mkdocs-material>=9.7; extra == "docs"
|
|
40
|
+
Provides-Extra: all
|
|
41
|
+
Requires-Dist: torch; extra == "all"
|
|
42
|
+
Requires-Dist: colorama==0.4.6; extra == "all"
|
|
43
|
+
Requires-Dist: aiohttp==3.9.4; extra == "all"
|
|
44
|
+
Requires-Dist: pandas==2.2.2; extra == "all"
|
|
45
|
+
Requires-Dist: duckdb==1.3.0; extra == "all"
|
|
46
|
+
Requires-Dist: websockets==12.0; extra == "all"
|
|
47
|
+
Requires-Dist: mkdocs>=1.6; extra == "all"
|
|
48
|
+
Requires-Dist: mkdocs-material>=9.7; extra == "all"
|
|
49
|
+
Provides-Extra: dev
|
|
50
|
+
Requires-Dist: pytest>=7.0; extra == "dev"
|
|
51
|
+
Requires-Dist: pytest-asyncio>=0.21; extra == "dev"
|
|
52
|
+
Requires-Dist: mkdocs>=1.6; extra == "dev"
|
|
53
|
+
Requires-Dist: mkdocs-material>=9.7; extra == "dev"
|
|
54
|
+
Dynamic: license-file
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+

|
|
58
|
+
[](https://img.shields.io/github/last-commit/briangu/klongpy)
|
|
59
|
+
[](https://libraries.io/github/briangu/klongpy)
|
|
60
|
+
[](https://github.com/briangu/klongpy/issues)
|
|
61
|
+
[](https://img.shields.io/github/repo-size/briangu/klongpy)
|
|
62
|
+
[](https://star-history.com/#briangu/klongpy)
|
|
63
|
+
|
|
64
|
+
[](https://github.com/briangu/klongpy/releases)
|
|
65
|
+
[](https://pepy.tech/project/klongpy)
|
|
66
|
+
[](https://opensource.org/licenses/MIT)
|
|
67
|
+
|
|
68
|
+
# KlongPy: A High-Performance Array Language with Autograd
|
|
69
|
+
|
|
70
|
+
KlongPy brings gradient-based programming to an already-succinct array language, so you can differentiate compact array expressions directly. It's also a batteries-included system with IPC, DuckDB-backed database tooling, web/websocket support, and other integrations exposed seamlessly from the language.
|
|
71
|
+
|
|
72
|
+
Backends include NumPy and optional PyTorch (CPU, CUDA, and Apple MPS). When PyTorch is enabled, gradients use autograd; otherwise numeric differentiation is the default.
|
|
73
|
+
|
|
74
|
+
**PyTorch gradient descent (10+ lines):**
|
|
75
|
+
```python
|
|
76
|
+
import torch
|
|
77
|
+
x = torch.tensor(5.0, requires_grad=True)
|
|
78
|
+
optimizer = torch.optim.SGD([x], lr=0.1)
|
|
79
|
+
for _ in range(100):
|
|
80
|
+
loss = x ** 2
|
|
81
|
+
optimizer.zero_grad()
|
|
82
|
+
loss.backward()
|
|
83
|
+
optimizer.step()
|
|
84
|
+
print(x) # ~0
|
|
85
|
+
```
|
|
86
|
+
|
|
87
|
+
**KlongPy gradient descent (2 lines):**
|
|
88
|
+
```klong
|
|
89
|
+
f::{x^2}; x::5.0
|
|
90
|
+
{x::x-(0.1*f:>x)}'!100 :" x -> 0
|
|
91
|
+
```
|
|
92
|
+
|
|
93
|
+
**Or with custom optimizer (copy from examples/):**
|
|
94
|
+
```klong
|
|
95
|
+
.pyf("optimizers";"SGDOptimizer")
|
|
96
|
+
x::5.0; opt::SGDOptimizer(klong;["x"];:{["lr" 0.1]})
|
|
97
|
+
{opt({x^2})}'!100 :" x -> 0
|
|
98
|
+
```
|
|
99
|
+
|
|
100
|
+
This isn't just shorter—it's a fundamentally different way to express computation. Array languages like APL, K, and Q revolutionized finance and data analysis with their concise vectorized operations. KlongPy adds native autograd, making gradients first-class citizens in an array language.
|
|
101
|
+
|
|
102
|
+
## Quick Install
|
|
103
|
+
|
|
104
|
+
```bash
|
|
105
|
+
# REPL + NumPy backend (pick one option below)
|
|
106
|
+
pip install "klongpy[repl]"
|
|
107
|
+
kgpy
|
|
108
|
+
|
|
109
|
+
# Enable torch backend (autograd + GPU)
|
|
110
|
+
pip install "klongpy[torch]"
|
|
111
|
+
USE_TORCH=1 kgpy # or KLONGPY_BACKEND=torch
|
|
112
|
+
|
|
113
|
+
# Everything (web, db, websockets, torch, repl)
|
|
114
|
+
pip install "klongpy[all]"
|
|
115
|
+
```
|
|
116
|
+
|
|
117
|
+
## Why KlongPy?
|
|
118
|
+
|
|
119
|
+
### For Quants and Traders
|
|
120
|
+
|
|
121
|
+
Build self-learning trading strategies in a language designed for time series:
|
|
122
|
+
|
|
123
|
+
```klong
|
|
124
|
+
:" Moving average crossover with learned parameters
|
|
125
|
+
sma::{(+/x)%#x}
|
|
126
|
+
signal::{(sma(n#x))-(sma(m#x))} :" Difference of moving averages
|
|
127
|
+
loss::{+/(signal(prices)-returns)^2}
|
|
128
|
+
|
|
129
|
+
:" Learn optimal window sizes via gradient descent
|
|
130
|
+
loss:>n :" Gradient w.r.t. short window
|
|
131
|
+
loss:>m :" Gradient w.r.t. long window
|
|
132
|
+
```
|
|
133
|
+
|
|
134
|
+
### For ML Researchers
|
|
135
|
+
|
|
136
|
+
Neural networks in pure array notation:
|
|
137
|
+
|
|
138
|
+
```klong
|
|
139
|
+
:" Single layer: sigmoid(W*x + b)
|
|
140
|
+
sigmoid::{1%(1+exp(0-x))}
|
|
141
|
+
forward::{sigmoid((w1*x)+b1)}
|
|
142
|
+
loss::{+/(forward'X - Y)^2}
|
|
143
|
+
|
|
144
|
+
:" Train with multi-param gradients
|
|
145
|
+
{grads::loss:>[w1 b1]; w1::w1-(lr*grads@0); b1::b1-(lr*grads@1)}'!1000
|
|
146
|
+
```
|
|
147
|
+
|
|
148
|
+
### For Scientists
|
|
149
|
+
|
|
150
|
+
Express mathematics directly:
|
|
151
|
+
|
|
152
|
+
```klong
|
|
153
|
+
:" Gradient of f(x,y,z) = x^2 + y^2 + z^2 at [1,2,3]
|
|
154
|
+
f::{+/x^2}
|
|
155
|
+
f:>[1 2 3] :" [2 4 6] - exact gradient via autograd
|
|
156
|
+
```
|
|
157
|
+
|
|
158
|
+
## The Array Language Advantage
|
|
159
|
+
|
|
160
|
+
Array languages express *what* you want, not *how* to compute it. This enables automatic optimization:
|
|
161
|
+
|
|
162
|
+
| Operation | Python | KlongPy |
|
|
163
|
+
|-----------|--------|---------|
|
|
164
|
+
| Sum an array | `sum(a)` | `+/a` |
|
|
165
|
+
| Running sum | `np.cumsum(a)` | `+\a` |
|
|
166
|
+
| Dot product | `np.dot(a,b)` | `+/a*b` |
|
|
167
|
+
| Average | `sum(a)/len(a)` | `(+/a)%#a` |
|
|
168
|
+
| Gradient | *10+ lines* | `f:>x` |
|
|
169
|
+
| Multi-param grad | *20+ lines* | `loss:>[w b]` |
|
|
170
|
+
| Jacobian | *15+ lines* | `x∂f` |
|
|
171
|
+
| Optimizer | *10+ lines* | `{w::w-(lr*f:>w)}` |
|
|
172
|
+
|
|
173
|
+
KlongPy inherits from the [APL](https://en.wikipedia.org/wiki/APL_(programming_language)) family tree (APL → J → K/Q → Klong), adding Python integration and automatic differentiation.
|
|
174
|
+
|
|
175
|
+
## Performance: NumPy vs PyTorch Backend
|
|
176
|
+
|
|
177
|
+
The PyTorch backend provides significant speedups for large arrays with GPU acceleration:
|
|
178
|
+
|
|
179
|
+
```
|
|
180
|
+
Benchmark NumPy (ms) Torch (ms) Speedup
|
|
181
|
+
----------------------------------------------------------------------
|
|
182
|
+
vector_add_1M 0.327 0.065 5.02x (torch)
|
|
183
|
+
compound_expr_1M 0.633 0.070 9.00x (torch)
|
|
184
|
+
sum_1M 0.246 0.087 2.84x (torch)
|
|
185
|
+
grade_up_100K 0.588 0.199 2.96x (torch)
|
|
186
|
+
enumerate_1M 0.141 0.050 2.83x (torch)
|
|
187
|
+
```
|
|
188
|
+
|
|
189
|
+
*Benchmarks on Apple M1 with MPS. Run `python tests/perf_backend.py --compare` for your system.*
|
|
190
|
+
|
|
191
|
+
## Complete Feature Set
|
|
192
|
+
|
|
193
|
+
KlongPy isn't just an autograd experiment—it's a production-ready platform with kdb+/Q-inspired features:
|
|
194
|
+
|
|
195
|
+
### Core Language
|
|
196
|
+
- **Vectorized Operations**: NumPy/PyTorch-powered bulk array operations
|
|
197
|
+
- **Automatic Differentiation**: Native `:>` operator for exact gradients
|
|
198
|
+
- **GPU Acceleration**: CUDA and Apple MPS support via PyTorch
|
|
199
|
+
- **Python Integration**: Import any Python library with `.py()` and `.pyf()`
|
|
200
|
+
|
|
201
|
+
### Data Infrastructure (kdb+/Q-like)
|
|
202
|
+
- **[Fast Columnar Database](docs/fast_columnar_database.md)**: Zero-copy DuckDB integration for SQL on arrays
|
|
203
|
+
- **[Inter-Process Communication](docs/ipc_capabilities.md)**: Build ticker plants and distributed systems
|
|
204
|
+
- **[Table & Key-Value Store](docs/table_and_key_value_stores.md)**: Persistent storage for tables and data
|
|
205
|
+
- **[Web Server](docs/web_server.md)**: Built-in HTTP server for APIs and dashboards
|
|
206
|
+
- **[WebSockets](docs/websockets.md)**: Connect to WebSocket servers and handle messages in KlongPy
|
|
207
|
+
- **[Timers](docs/timer.md)**: Scheduled execution for periodic tasks
|
|
208
|
+
|
|
209
|
+
### Documentation
|
|
210
|
+
- **[Quick Start Guide](docs/quick-start.md)**: Get running in 5 minutes
|
|
211
|
+
- **[PyTorch Backend & Autograd](docs/torch_backend.md)**: Complete autograd reference
|
|
212
|
+
- **[Operator Reference](docs/operators.md)**: All language operators
|
|
213
|
+
- **[Performance Guide](docs/performance.md)**: Optimization tips
|
|
214
|
+
|
|
215
|
+
Full documentation: [https://briangu.github.io/klongpy](https://briangu.github.io/klongpy)
|
|
216
|
+
|
|
217
|
+
## Typing Special Characters
|
|
218
|
+
|
|
219
|
+
KlongPy uses Unicode operators for mathematical notation. Here's how to type them:
|
|
220
|
+
|
|
221
|
+
| Symbol | Name | Mac | Windows | Description |
|
|
222
|
+
|--------|------|-----|---------|-------------|
|
|
223
|
+
| `∇` | Nabla | `Option + v` then select, or Character Viewer | `Alt + 8711` (numpad) | Numeric gradient |
|
|
224
|
+
| `∂` | Partial | `Option + d` | `Alt + 8706` (numpad) | Jacobian operator |
|
|
225
|
+
|
|
226
|
+
**Mac Tips:**
|
|
227
|
+
- **Option + d** types `∂` directly
|
|
228
|
+
- For `∇`, open Character Viewer with **Ctrl + Cmd + Space**, search "nabla"
|
|
229
|
+
- Or simply copy-paste: `∇` `∂`
|
|
230
|
+
|
|
231
|
+
**Alternative:** Use the function equivalents that don't require special characters:
|
|
232
|
+
```klong
|
|
233
|
+
3∇f :" Using nabla
|
|
234
|
+
.jacobian(f;x) :" Instead of x∂f
|
|
235
|
+
```
|
|
236
|
+
|
|
237
|
+
## Syntax Cheat Sheet
|
|
238
|
+
|
|
239
|
+
Functions take up to 3 parameters, always named `x`, `y`, `z`:
|
|
240
|
+
|
|
241
|
+
```klong
|
|
242
|
+
:" Operators (right to left evaluation)
|
|
243
|
+
5+3*2 :" 11 (3*2 first, then +5)
|
|
244
|
+
+/[1 2 3] :" 6 (sum: + over /)
|
|
245
|
+
*/[1 2 3] :" 6 (product: * over /)
|
|
246
|
+
#[1 2 3] :" 3 (length)
|
|
247
|
+
|[3 1 2]     :" [2 1 3] (reverse)
|
|
248
|
+
&[1 0 1] :" [0 2] (where/indices of true)
|
|
249
|
+
|
|
250
|
+
:" Functions
|
|
251
|
+
avg::{(+/x)%#x} :" Monad (1 arg)
|
|
252
|
+
dot::{+/x*y} :" Dyad (2 args)
|
|
253
|
+
clip::{x|y&z}       :" Triad (3 args): max(x,min(y,z))
|
|
254
|
+
|
|
255
|
+
:" Adverbs (modifiers)
|
|
256
|
+
f'[1 2 3] :" Each: apply f to each element
|
|
257
|
+
1 2 3 +'[10 20 30] :" Each-pair: [11 22 33]
|
|
258
|
+
+/[1 2 3] :" Over: fold/reduce
|
|
259
|
+
+\[1 2 3] :" Scan: running fold [1 3 6]
|
|
260
|
+
|
|
261
|
+
:" Autograd
|
|
262
|
+
f::{x^2}
|
|
263
|
+
3∇f :" Numeric gradient at x=3 -> ~6.0
|
|
264
|
+
f:>3 :" Autograd (exact with torch) at x=3 -> 6.0
|
|
265
|
+
f:>[1 2 3] :" Gradient of sum-of-squares -> [2 4 6]
|
|
266
|
+
|
|
267
|
+
:" Multi-parameter gradients
|
|
268
|
+
w::2.0; b::3.0
|
|
269
|
+
loss::{(w^2)+(b^2)}
|
|
270
|
+
loss:>[w b] :" Gradients for both -> [4.0 6.0]
|
|
271
|
+
|
|
272
|
+
:" Jacobian (for vector functions)
|
|
273
|
+
g::{x^2} :" Element-wise square
|
|
274
|
+
[1 2]∂g :" Jacobian matrix -> [[2 0] [0 4]]
|
|
275
|
+
```
|
|
276
|
+
|
|
277
|
+
## Examples
|
|
278
|
+
|
|
279
|
+
### 1. Basic Array Operations
|
|
280
|
+
|
|
281
|
+
```klong
|
|
282
|
+
?> a::[1 2 3 4 5]
|
|
283
|
+
[1 2 3 4 5]
|
|
284
|
+
?> a*a :" Element-wise square
|
|
285
|
+
[1 4 9 16 25]
|
|
286
|
+
?> +/a :" Sum
|
|
287
|
+
15
|
|
288
|
+
?> (*/a) :" Product
|
|
289
|
+
120
|
|
290
|
+
?> avg::{(+/x)%#x} :" Define average
|
|
291
|
+
:monad
|
|
292
|
+
?> avg(a)
|
|
293
|
+
3.0
|
|
294
|
+
```
|
|
295
|
+
|
|
296
|
+
### 2. Gradient Descent
|
|
297
|
+
|
|
298
|
+
```klong
|
|
299
|
+
:" Minimize f(x) = (x-3)^2
|
|
300
|
+
?> f::{(x-3)^2}
|
|
301
|
+
:monad
|
|
302
|
+
?> x::10.0; lr::0.1
|
|
303
|
+
0.1
|
|
304
|
+
?> {x::x-(lr*f:>x); x}'!10 :" 10 gradient steps
|
|
305
|
+
[8.6 7.48 6.584 5.867 5.294 4.835 4.468 4.175 3.940 3.752]
|
|
306
|
+
```
|
|
307
|
+
|
|
308
|
+
### 3. Linear Regression
|
|
309
|
+
|
|
310
|
+
```klong
|
|
311
|
+
:" Data: y = 2*x + 3 + noise
|
|
312
|
+
X::[1 2 3 4 5]
|
|
313
|
+
Y::[5.1 6.9 9.2 10.8 13.1]
|
|
314
|
+
|
|
315
|
+
:" Model parameters
|
|
316
|
+
w::0.0; b::0.0
|
|
317
|
+
|
|
318
|
+
:" Loss function
|
|
319
|
+
mse::{(+/(((w*X)+b)-Y)^2)%#X}
|
|
320
|
+
|
|
321
|
+
:" Train with multi-parameter gradients
|
|
322
|
+
lr::0.01
|
|
323
|
+
{grads::mse:>[w b]; w::w-(lr*grads@0); b::b-(lr*grads@1)}'!1000
|
|
324
|
+
|
|
325
|
+
.d("Learned: w="); .d(w); .d(" b="); .p(b)
|
|
326
|
+
:" Output: Learned: w=2.02 b=2.94
|
|
327
|
+
```
|
|
328
|
+
|
|
329
|
+
**Or with custom optimizer (copy from examples/autograd/optimizers.py):**
|
|
330
|
+
```klong
|
|
331
|
+
.pyf("optimizers";"AdamOptimizer")
|
|
332
|
+
w::0.0; b::0.0
|
|
333
|
+
opt::AdamOptimizer(klong;["w" "b"];:{["lr" 0.01]})
|
|
334
|
+
{opt(mse)}'!1000 :" Optimizer handles gradient computation
|
|
335
|
+
```
|
|
336
|
+
|
|
337
|
+
### 4. Database Operations
|
|
338
|
+
|
|
339
|
+
```klong
|
|
340
|
+
?> .py("klongpy.db")
|
|
341
|
+
?> t::.table([[\"name\" [\"Alice\" \"Bob\" \"Carol\"]] [\"age\" [25 30 35]]])
|
|
342
|
+
name age
|
|
343
|
+
Alice 25
|
|
344
|
+
Bob 30
|
|
345
|
+
Carol 35
|
|
346
|
+
?> db::.db(:{},\"T\",t)
|
|
347
|
+
?> db(\"SELECT * FROM T WHERE age > 27\")
|
|
348
|
+
name age
|
|
349
|
+
Bob 30
|
|
350
|
+
Carol 35
|
|
351
|
+
```
|
|
352
|
+
|
|
353
|
+
### 5. IPC: Distributed Computing
|
|
354
|
+
|
|
355
|
+
**Server:**
|
|
356
|
+
```klong
|
|
357
|
+
?> avg::{(+/x)%#x}
|
|
358
|
+
:monad
|
|
359
|
+
?> .srv(8888)
|
|
360
|
+
1
|
|
361
|
+
```
|
|
362
|
+
|
|
363
|
+
**Client:**
|
|
364
|
+
```klong
|
|
365
|
+
?> f::.cli(8888) :" Connect to server
|
|
366
|
+
remote[localhost:8888]:fn
|
|
367
|
+
?> myavg::f(:avg) :" Get remote function reference
|
|
368
|
+
remote[localhost:8888]:fn:avg:monad
|
|
369
|
+
?> myavg(!1000000) :" Execute on server
|
|
370
|
+
499999.5
|
|
371
|
+
```
|
|
372
|
+
|
|
373
|
+
### 6. Web Server
|
|
374
|
+
|
|
375
|
+
```klong
|
|
376
|
+
.py("klongpy.web")
|
|
377
|
+
data::!10
|
|
378
|
+
index::{x; "Hello from KlongPy! Data: ",data}
|
|
379
|
+
get:::{}; get,"/",index
|
|
380
|
+
post:::{}
|
|
381
|
+
h::.web(8888;get;post)
|
|
382
|
+
.p("Server ready at http://localhost:8888")
|
|
383
|
+
```
|
|
384
|
+
|
|
385
|
+
```bash
|
|
386
|
+
$ curl http://localhost:8888
|
|
387
|
+
['Hello from KlongPy! Data: ' 0 1 2 3 4 5 6 7 8 9]
|
|
388
|
+
```
|
|
389
|
+
|
|
390
|
+
## Installation Options
|
|
391
|
+
|
|
392
|
+
### Basic Runtime (NumPy only)
|
|
393
|
+
```bash
|
|
394
|
+
pip install klongpy
|
|
395
|
+
```
|
|
396
|
+
|
|
397
|
+
### REPL Support
|
|
398
|
+
```bash
|
|
399
|
+
pip install "klongpy[repl]"
|
|
400
|
+
```
|
|
401
|
+
|
|
402
|
+
### With PyTorch Autograd (Recommended)
|
|
403
|
+
```bash
|
|
404
|
+
pip install "klongpy[torch]"
|
|
405
|
+
USE_TORCH=1 kgpy # Enable torch backend (or KLONGPY_BACKEND=torch)
|
|
406
|
+
```
|
|
407
|
+
|
|
408
|
+
### Web / DB / WebSockets Extras
|
|
409
|
+
```bash
|
|
410
|
+
pip install "klongpy[web]"
|
|
411
|
+
pip install "klongpy[db]"
|
|
412
|
+
pip install "klongpy[ws]"
|
|
413
|
+
```
|
|
414
|
+
|
|
415
|
+
### Full Installation (REPL, DB, Web, WebSockets, Torch)
|
|
416
|
+
```bash
|
|
417
|
+
pip install "klongpy[all]"
|
|
418
|
+
```
|
|
419
|
+
|
|
420
|
+
## Lineage and Inspiration
|
|
421
|
+
|
|
422
|
+
KlongPy stands on the shoulders of giants:
|
|
423
|
+
|
|
424
|
+
- **[APL](https://en.wikipedia.org/wiki/APL_(programming_language))** (1966): Ken Iverson's revolutionary notation
|
|
425
|
+
- **[J](https://www.jsoftware.com/)**: ASCII-friendly APL successor
|
|
426
|
+
- **[K/Q/kdb+](https://code.kx.com/)**: High-performance time series and trading systems
|
|
427
|
+
- **[Klong](https://t3x.org/klong)**: Nils M Holm's elegant, accessible array language
|
|
428
|
+
- **[NumPy](https://numpy.org/)**: The "Iverson Ghost" in Python's scientific stack
|
|
429
|
+
- **[PyTorch](https://pytorch.org/)**: Automatic differentiation and GPU acceleration
|
|
430
|
+
|
|
431
|
+
KlongPy combines Klong's simplicity with Python's ecosystem and PyTorch's autograd—creating something new: an array language where gradients are first-class citizens.
|
|
432
|
+
|
|
433
|
+
## Use Cases
|
|
434
|
+
|
|
435
|
+
- **Quantitative Finance**: Self-optimizing trading strategies, risk models, portfolio optimization
|
|
436
|
+
- **Machine Learning**: Neural networks, gradient descent, optimization in minimal code
|
|
437
|
+
- **Scientific Computing**: Physics simulations, numerical methods, data analysis
|
|
438
|
+
- **Time Series Analysis**: Signal processing, feature engineering, streaming data
|
|
439
|
+
- **Rapid Prototyping**: Express complex algorithms in few lines, then optimize
|
|
440
|
+
|
|
441
|
+
## Status
|
|
442
|
+
|
|
443
|
+
KlongPy is a superset of the Klong array language, passing all Klong integration tests plus additional test suites. The PyTorch backend provides GPU acceleration (CUDA, MPS) and automatic differentiation.
|
|
444
|
+
|
|
445
|
+
Ongoing development:
|
|
446
|
+
- Expanded torch backend coverage
|
|
447
|
+
- Additional built-in tools and integrations
|
|
448
|
+
- Improved error messages and debugging
|
|
449
|
+
|
|
450
|
+
## Related Projects
|
|
451
|
+
|
|
452
|
+
- [Klupyter](https://github.com/briangu/klupyter) - KlongPy in Jupyter Notebooks
|
|
453
|
+
- [VS Code Syntax Highlighting](https://github.com/briangu/klongpy-vscode)
|
|
454
|
+
- [Advent of Code Solutions](https://github.com/briangu/aoc)
|
|
455
|
+
|
|
456
|
+
## Development
|
|
457
|
+
|
|
458
|
+
```bash
|
|
459
|
+
git clone https://github.com/briangu/klongpy.git
|
|
460
|
+
cd klongpy
|
|
461
|
+
pip install -e ".[dev]" # Install in editable mode with dev dependencies
|
|
462
|
+
python3 -m pytest tests/ # Run tests
|
|
463
|
+
```
|
|
464
|
+
|
|
465
|
+
## Issues
|
|
466
|
+
|
|
467
|
+
This project does not accept direct issue submissions.
|
|
468
|
+
|
|
469
|
+
Please start with a GitHub Discussion.
|
|
470
|
+
Maintainers will promote validated discussions to Issues.
|
|
471
|
+
|
|
472
|
+
Active contributors may be invited to open issues directly.
|
|
473
|
+
|
|
474
|
+
## Contributors
|
|
475
|
+
|
|
476
|
+
See [CONTRIBUTING.md](CONTRIBUTING.md) for contribution workflow, discussion-first policy, and code standards.
|
|
477
|
+
|
|
478
|
+
## Documentation
|
|
479
|
+
|
|
480
|
+
```bash
|
|
481
|
+
# Install docs tooling
|
|
482
|
+
pip install -e ".[docs]"
|
|
483
|
+
|
|
484
|
+
# Build the site into ./site
|
|
485
|
+
mkdocs build
|
|
486
|
+
|
|
487
|
+
# Serve locally with live reload
|
|
488
|
+
mkdocs serve
|
|
489
|
+
```
|
|
490
|
+
|
|
491
|
+
## Acknowledgements
|
|
492
|
+
|
|
493
|
+
Huge thanks to [Nils M Holm](https://t3x.org) for creating Klong and writing the [Klong Book](https://t3x.org/klong/book.html), which made this project possible.
|