datahike-browser-tests 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.circleci/config.yml +405 -0
- package/.circleci/scripts/gen_ci.clj +194 -0
- package/.cirrus.yml +60 -0
- package/.clj-kondo/babashka/sci/config.edn +1 -0
- package/.clj-kondo/babashka/sci/sci/core.clj +9 -0
- package/.clj-kondo/config.edn +95 -0
- package/.dir-locals.el +2 -0
- package/.github/FUNDING.yml +3 -0
- package/.github/ISSUE_TEMPLATE/1-bug-report.yml +68 -0
- package/.github/ISSUE_TEMPLATE/2-feature-request.yml +28 -0
- package/.github/ISSUE_TEMPLATE/config.yml +6 -0
- package/.github/pull_request_template.md +24 -0
- package/.github/workflows/native-image.yml +84 -0
- package/LICENSE +203 -0
- package/README.md +273 -0
- package/bb/deps.edn +9 -0
- package/bb/resources/github-fingerprints +3 -0
- package/bb/resources/native-image-tests/run-bb-pod-tests.clj +162 -0
- package/bb/resources/native-image-tests/run-libdatahike-tests +12 -0
- package/bb/resources/native-image-tests/run-native-image-tests +74 -0
- package/bb/resources/native-image-tests/run-python-tests +22 -0
- package/bb/resources/native-image-tests/testconfig.attr-refs.edn +6 -0
- package/bb/resources/native-image-tests/testconfig.edn +5 -0
- package/bb/resources/template/.settings/org.eclipse.jdt.apt.core.prefs +2 -0
- package/bb/resources/template/.settings/org.eclipse.jdt.core.prefs +9 -0
- package/bb/resources/template/.settings/org.eclipse.m2e.core.prefs +4 -0
- package/bb/resources/template/pom.xml +22 -0
- package/bb/src/tools/build.clj +132 -0
- package/bb/src/tools/clj_kondo.clj +32 -0
- package/bb/src/tools/deploy.clj +26 -0
- package/bb/src/tools/examples.clj +19 -0
- package/bb/src/tools/npm.clj +100 -0
- package/bb/src/tools/python.clj +14 -0
- package/bb/src/tools/release.clj +94 -0
- package/bb/src/tools/test.clj +148 -0
- package/bb/src/tools/version.clj +47 -0
- package/bb.edn +269 -0
- package/benchmark/src/benchmark/cli.clj +195 -0
- package/benchmark/src/benchmark/compare.clj +157 -0
- package/benchmark/src/benchmark/config.clj +316 -0
- package/benchmark/src/benchmark/measure.clj +187 -0
- package/benchmark/src/benchmark/store.clj +190 -0
- package/benchmark/test/benchmark/measure_test.clj +156 -0
- package/build.clj +30 -0
- package/config.edn +49 -0
- package/deps.edn +138 -0
- package/dev/sandbox.clj +82 -0
- package/dev/sandbox.cljs +127 -0
- package/dev/sandbox_benchmarks.clj +27 -0
- package/dev/sandbox_client.clj +87 -0
- package/dev/sandbox_transact_bench.clj +109 -0
- package/dev/user.clj +79 -0
- package/doc/README.md +96 -0
- package/doc/adl/README.md +6 -0
- package/doc/adl/adr-000-adr.org +28 -0
- package/doc/adl/adr-001-attribute-references.org +15 -0
- package/doc/adl/adr-002-build-tooling.org +54 -0
- package/doc/adl/adr-003-db-meta-data.md +52 -0
- package/doc/adl/adr-004-github-flow.md +40 -0
- package/doc/adl/adr-XYZ-template.md +30 -0
- package/doc/adl/index.org +3 -0
- package/doc/assets/datahike-logo.svg +3 -0
- package/doc/assets/datahiking-invoice.org +85 -0
- package/doc/assets/hhtree2.png +0 -0
- package/doc/assets/network_topology.svg +624 -0
- package/doc/assets/perf.png +0 -0
- package/doc/assets/schema_mindmap.mm +132 -0
- package/doc/assets/schema_mindmap.svg +970 -0
- package/doc/assets/temporal_index.mm +74 -0
- package/doc/backend-development.md +78 -0
- package/doc/bb-pod.md +89 -0
- package/doc/benchmarking.md +360 -0
- package/doc/bindings/edn-conversion.md +383 -0
- package/doc/cli.md +162 -0
- package/doc/cljdoc.edn +27 -0
- package/doc/cljs-support.md +133 -0
- package/doc/config.md +406 -0
- package/doc/contributing.md +114 -0
- package/doc/datalog-vs-sql.md +210 -0
- package/doc/datomic_differences.md +109 -0
- package/doc/development/pull-api-ns.md +186 -0
- package/doc/development/pull-frame-state-diagram.jpg +0 -0
- package/doc/distributed.md +566 -0
- package/doc/entity_spec.md +92 -0
- package/doc/gc.md +273 -0
- package/doc/java-api.md +808 -0
- package/doc/javascript-api.md +421 -0
- package/doc/libdatahike.md +86 -0
- package/doc/logging_and_error_handling.md +43 -0
- package/doc/norms.md +66 -0
- package/doc/schema-migration.md +85 -0
- package/doc/schema.md +287 -0
- package/doc/storage-backends.md +363 -0
- package/doc/store-id-refactoring.md +596 -0
- package/doc/time_variance.md +325 -0
- package/doc/unstructured.md +167 -0
- package/doc/versioning.md +261 -0
- package/examples/basic/README.md +19 -0
- package/examples/basic/deps.edn +6 -0
- package/examples/basic/docker-compose.yml +13 -0
- package/examples/basic/src/examples/core.clj +60 -0
- package/examples/basic/src/examples/schema.clj +155 -0
- package/examples/basic/src/examples/store.clj +60 -0
- package/examples/basic/src/examples/time_travel.clj +185 -0
- package/examples/java/.settings/org.eclipse.core.resources.prefs +3 -0
- package/examples/java/.settings/org.eclipse.jdt.apt.core.prefs +2 -0
- package/examples/java/.settings/org.eclipse.jdt.core.prefs +9 -0
- package/examples/java/.settings/org.eclipse.m2e.core.prefs +4 -0
- package/examples/java/README.md +162 -0
- package/examples/java/pom.xml +62 -0
- package/examples/java/src/main/java/examples/QuickStart.java +115 -0
- package/examples/java/src/main/java/examples/SchemaExample.java +148 -0
- package/examples/java/src/main/java/examples/TimeTravelExample.java +121 -0
- package/flake.lock +27 -0
- package/flake.nix +27 -0
- package/http-server/datahike/http/middleware.clj +75 -0
- package/http-server/datahike/http/server.clj +269 -0
- package/java/src/datahike/java/Database.java +274 -0
- package/java/src/datahike/java/Datahike.java +281 -0
- package/java/src/datahike/java/DatahikeGeneratedTest.java +349 -0
- package/java/src/datahike/java/DatahikeTest.java +370 -0
- package/java/src/datahike/java/EDN.java +170 -0
- package/java/src/datahike/java/IEntity.java +11 -0
- package/java/src/datahike/java/Keywords.java +161 -0
- package/java/src/datahike/java/SchemaFlexibility.java +52 -0
- package/java/src/datahike/java/Util.java +219 -0
- package/karma.conf.js +19 -0
- package/libdatahike/compile-cpp +7 -0
- package/libdatahike/src/datahike/impl/LibDatahikeBase.java +203 -0
- package/libdatahike/src/datahike/impl/libdatahike.clj +59 -0
- package/libdatahike/src/test_cpp.cpp +61 -0
- package/npm-package/PUBLISHING.md +140 -0
- package/npm-package/README.md +226 -0
- package/npm-package/package.template.json +34 -0
- package/npm-package/test-isomorphic.ts +281 -0
- package/npm-package/test.js +557 -0
- package/npm-package/typescript-test.ts +70 -0
- package/package.json +16 -0
- package/pydatahike/README.md +569 -0
- package/pydatahike/pyproject.toml +91 -0
- package/pydatahike/setup.py +42 -0
- package/pydatahike/src/datahike/__init__.py +134 -0
- package/pydatahike/src/datahike/_native.py +250 -0
- package/pydatahike/src/datahike/_version.py +2 -0
- package/pydatahike/src/datahike/database.py +722 -0
- package/pydatahike/src/datahike/edn.py +311 -0
- package/pydatahike/src/datahike/py.typed +0 -0
- package/pydatahike/tests/conftest.py +17 -0
- package/pydatahike/tests/test_basic.py +170 -0
- package/pydatahike/tests/test_database.py +51 -0
- package/pydatahike/tests/test_edn_conversion.py +299 -0
- package/pydatahike/tests/test_query.py +99 -0
- package/pydatahike/tests/test_schema.py +55 -0
- package/resources/clj-kondo.exports/io.replikativ/datahike/config.edn +5 -0
- package/resources/example_server.edn +4 -0
- package/shadow-cljs.edn +56 -0
- package/src/data_readers.clj +7 -0
- package/src/datahike/api/impl.cljc +176 -0
- package/src/datahike/api/specification.cljc +633 -0
- package/src/datahike/api/types.cljc +261 -0
- package/src/datahike/api.cljc +41 -0
- package/src/datahike/array.cljc +99 -0
- package/src/datahike/cli.clj +166 -0
- package/src/datahike/cljs.cljs +6 -0
- package/src/datahike/codegen/cli.clj +406 -0
- package/src/datahike/codegen/clj_kondo.clj +291 -0
- package/src/datahike/codegen/java.clj +403 -0
- package/src/datahike/codegen/naming.cljc +33 -0
- package/src/datahike/codegen/native.clj +559 -0
- package/src/datahike/codegen/pod.clj +488 -0
- package/src/datahike/codegen/python.clj +838 -0
- package/src/datahike/codegen/report.clj +55 -0
- package/src/datahike/codegen/typescript.clj +262 -0
- package/src/datahike/codegen/validation.clj +145 -0
- package/src/datahike/config.cljc +294 -0
- package/src/datahike/connections.cljc +16 -0
- package/src/datahike/connector.cljc +265 -0
- package/src/datahike/constants.cljc +142 -0
- package/src/datahike/core.cljc +297 -0
- package/src/datahike/datom.cljc +459 -0
- package/src/datahike/db/interface.cljc +119 -0
- package/src/datahike/db/search.cljc +305 -0
- package/src/datahike/db/transaction.cljc +937 -0
- package/src/datahike/db/utils.cljc +338 -0
- package/src/datahike/db.cljc +956 -0
- package/src/datahike/experimental/unstructured.cljc +126 -0
- package/src/datahike/experimental/versioning.cljc +172 -0
- package/src/datahike/externs.js +31 -0
- package/src/datahike/gc.cljc +69 -0
- package/src/datahike/http/client.clj +188 -0
- package/src/datahike/http/writer.clj +79 -0
- package/src/datahike/impl/entity.cljc +218 -0
- package/src/datahike/index/interface.cljc +93 -0
- package/src/datahike/index/persistent_set.cljc +469 -0
- package/src/datahike/index/utils.cljc +44 -0
- package/src/datahike/index.cljc +32 -0
- package/src/datahike/js/api.cljs +172 -0
- package/src/datahike/js/api_macros.clj +22 -0
- package/src/datahike/js.cljs +163 -0
- package/src/datahike/json.cljc +209 -0
- package/src/datahike/lru.cljc +146 -0
- package/src/datahike/migrate.clj +39 -0
- package/src/datahike/norm/norm.clj +245 -0
- package/src/datahike/online_gc.cljc +252 -0
- package/src/datahike/pod.clj +155 -0
- package/src/datahike/pull_api.cljc +325 -0
- package/src/datahike/query.cljc +1945 -0
- package/src/datahike/query_stats.cljc +88 -0
- package/src/datahike/readers.cljc +62 -0
- package/src/datahike/remote.cljc +218 -0
- package/src/datahike/schema.cljc +228 -0
- package/src/datahike/schema_cache.cljc +42 -0
- package/src/datahike/spec.cljc +101 -0
- package/src/datahike/store.cljc +80 -0
- package/src/datahike/tools.cljc +308 -0
- package/src/datahike/transit.cljc +80 -0
- package/src/datahike/writer.cljc +239 -0
- package/src/datahike/writing.cljc +362 -0
- package/src/deps.cljs +1 -0
- package/src-hitchhiker-tree/datahike/index/hitchhiker_tree/insert.cljc +76 -0
- package/src-hitchhiker-tree/datahike/index/hitchhiker_tree/upsert.cljc +128 -0
- package/src-hitchhiker-tree/datahike/index/hitchhiker_tree.cljc +213 -0
- package/test/datahike/backward_compatibility_test/src/backward_test.clj +37 -0
- package/test/datahike/integration_test/config_record_file_test.clj +14 -0
- package/test/datahike/integration_test/config_record_test.clj +14 -0
- package/test/datahike/integration_test/depr_config_uri_test.clj +15 -0
- package/test/datahike/integration_test/return_map_test.clj +62 -0
- package/test/datahike/integration_test.cljc +67 -0
- package/test/datahike/norm/norm_test.clj +124 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/001-a1-example.edn +5 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/002-a2-example.edn +5 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/003-tx-fn-test.edn +1 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/004-tx-data-and-tx-fn-test.edn +5 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/01-transact-basic-characters.edn +2 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/02 add occupation.edn +5 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/checksums.edn +12 -0
- package/test/datahike/norm/resources/simple-test/001-a1-example.edn +5 -0
- package/test/datahike/norm/resources/simple-test/002-a2-example.edn +5 -0
- package/test/datahike/norm/resources/simple-test/checksums.edn +4 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/001-a1-example.edn +5 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/002-a2-example.edn +5 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/003-tx-fn-test.edn +1 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/checksums.edn +6 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/second/004-tx-data-and-tx-fn-test.edn +5 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/second/checksums.edn +2 -0
- package/test/datahike/norm/resources/tx-fn-test/first/001-a1-example.edn +5 -0
- package/test/datahike/norm/resources/tx-fn-test/first/002-a2-example.edn +5 -0
- package/test/datahike/norm/resources/tx-fn-test/first/checksums.edn +4 -0
- package/test/datahike/norm/resources/tx-fn-test/second/003-tx-fn-test.edn +1 -0
- package/test/datahike/norm/resources/tx-fn-test/second/checksums.edn +2 -0
- package/test/datahike/test/api_test.cljc +895 -0
- package/test/datahike/test/array_test.cljc +40 -0
- package/test/datahike/test/attribute_refs/datoms_test.cljc +140 -0
- package/test/datahike/test/attribute_refs/db_test.cljc +42 -0
- package/test/datahike/test/attribute_refs/differences_test.cljc +515 -0
- package/test/datahike/test/attribute_refs/entity_test.cljc +89 -0
- package/test/datahike/test/attribute_refs/pull_api_test.cljc +320 -0
- package/test/datahike/test/attribute_refs/query_find_specs_test.cljc +59 -0
- package/test/datahike/test/attribute_refs/query_fns_test.cljc +130 -0
- package/test/datahike/test/attribute_refs/query_interop_test.cljc +47 -0
- package/test/datahike/test/attribute_refs/query_not_test.cljc +193 -0
- package/test/datahike/test/attribute_refs/query_or_test.cljc +137 -0
- package/test/datahike/test/attribute_refs/query_pull_test.cljc +156 -0
- package/test/datahike/test/attribute_refs/query_rules_test.cljc +176 -0
- package/test/datahike/test/attribute_refs/query_test.cljc +241 -0
- package/test/datahike/test/attribute_refs/temporal_search.cljc +22 -0
- package/test/datahike/test/attribute_refs/transact_test.cljc +220 -0
- package/test/datahike/test/attribute_refs/utils.cljc +128 -0
- package/test/datahike/test/cache_test.cljc +38 -0
- package/test/datahike/test/components_test.cljc +92 -0
- package/test/datahike/test/config_test.cljc +158 -0
- package/test/datahike/test/core_test.cljc +105 -0
- package/test/datahike/test/datom_test.cljc +44 -0
- package/test/datahike/test/db_test.cljc +54 -0
- package/test/datahike/test/entity_spec_test.cljc +159 -0
- package/test/datahike/test/entity_test.cljc +103 -0
- package/test/datahike/test/explode_test.cljc +143 -0
- package/test/datahike/test/filter_test.cljc +75 -0
- package/test/datahike/test/gc_test.cljc +159 -0
- package/test/datahike/test/http/server_test.clj +192 -0
- package/test/datahike/test/http/writer_test.clj +86 -0
- package/test/datahike/test/ident_test.cljc +32 -0
- package/test/datahike/test/index_test.cljc +345 -0
- package/test/datahike/test/insert.cljc +125 -0
- package/test/datahike/test/java_bindings_test.clj +6 -0
- package/test/datahike/test/listen_test.cljc +41 -0
- package/test/datahike/test/lookup_refs_test.cljc +266 -0
- package/test/datahike/test/lru_test.cljc +27 -0
- package/test/datahike/test/migrate_test.clj +297 -0
- package/test/datahike/test/model/core.cljc +376 -0
- package/test/datahike/test/model/invariant.cljc +142 -0
- package/test/datahike/test/model/rng.cljc +82 -0
- package/test/datahike/test/model_test.clj +217 -0
- package/test/datahike/test/nodejs_test.cljs +262 -0
- package/test/datahike/test/online_gc_test.cljc +475 -0
- package/test/datahike/test/pod_test.clj +369 -0
- package/test/datahike/test/pull_api_test.cljc +474 -0
- package/test/datahike/test/purge_test.cljc +144 -0
- package/test/datahike/test/query_aggregates_test.cljc +101 -0
- package/test/datahike/test/query_find_specs_test.cljc +52 -0
- package/test/datahike/test/query_fns_test.cljc +523 -0
- package/test/datahike/test/query_interop_test.cljc +47 -0
- package/test/datahike/test/query_not_test.cljc +189 -0
- package/test/datahike/test/query_or_test.cljc +158 -0
- package/test/datahike/test/query_pull_test.cljc +147 -0
- package/test/datahike/test/query_rules_test.cljc +248 -0
- package/test/datahike/test/query_stats_test.cljc +218 -0
- package/test/datahike/test/query_test.cljc +984 -0
- package/test/datahike/test/schema_test.cljc +424 -0
- package/test/datahike/test/specification_test.cljc +30 -0
- package/test/datahike/test/store_test.cljc +78 -0
- package/test/datahike/test/stress_test.cljc +57 -0
- package/test/datahike/test/time_variance_test.cljc +518 -0
- package/test/datahike/test/tools_test.clj +134 -0
- package/test/datahike/test/transact_test.cljc +518 -0
- package/test/datahike/test/tuples_test.cljc +564 -0
- package/test/datahike/test/unstructured_test.cljc +291 -0
- package/test/datahike/test/upsert_impl_test.cljc +205 -0
- package/test/datahike/test/upsert_test.cljc +363 -0
- package/test/datahike/test/utils.cljc +110 -0
- package/test/datahike/test/validation_test.cljc +48 -0
- package/test/datahike/test/versioning_test.cljc +56 -0
- package/test/datahike/test.cljc +66 -0
- package/tests.edn +24 -0
package/README.md
ADDED
|
@@ -0,0 +1,273 @@
|
|
|
1
|
+
<p align="center">
|
|
2
|
+
<a align="center" href="https://datahike.io" target="_blank">
|
|
3
|
+
<img alt="Datahike" src="./doc/assets/datahike-logo.svg" height="128em">
|
|
4
|
+
</a>
|
|
5
|
+
</p>
|
|
6
|
+
<p align="center">
|
|
7
|
+
<a href="https://clojurians.slack.com/archives/CB7GJAN0L"><img src="https://badgen.net/badge/-/slack?icon=slack&label"/></a>
|
|
8
|
+
<a href="https://clojars.org/org.replikativ/datahike"> <img src="https://img.shields.io/clojars/v/org.replikativ/datahike.svg" /></a>
|
|
9
|
+
<a href="https://circleci.com/gh/replikativ/datahike"><img src="https://circleci.com/gh/replikativ/datahike.svg?style=shield"/></a>
|
|
10
|
+
<a href="https://github.com/replikativ/datahike/tree/main"><img src="https://img.shields.io/github/last-commit/replikativ/datahike/main"/></a>
|
|
11
|
+
</p>
|
|
12
|
+
|
|
13
|
+
**Branch databases, not just code.**
|
|
14
|
+
|
|
15
|
+
[Datahike](https://datahike.io) is a durable [Datalog](https://en.wikipedia.org/wiki/Datalog) database with
|
|
16
|
+
Datomic-compatible APIs and git-like semantics. Built on persistent data structures and structural sharing,
|
|
17
|
+
database snapshots are immutable values that can be held, shared, and queried anywhere—without locks or copying.
|
|
18
|
+
|
|
19
|
+
**Key capabilities:**
|
|
20
|
+
- 🌐 **[Distributed Index Space](./doc/distributed.md)**: Read scaling without database connections—readers access persistent indices directly
|
|
21
|
+
- 🗄️ **[Flexible storage](./doc/storage-backends.md)**: File, LMDB, S3, JDBC, Redis, IndexedDB via konserve
|
|
22
|
+
- 🌍 **[Cross-platform](./doc/README.md#language-bindings-beta)**: JVM, Node.js, Browser (Clojure, ClojureScript, JavaScript, Java APIs)
|
|
23
|
+
- ⚡ **[Real-time sync](./doc/distributed.md)**: WebSocket streaming with Kabel for browser ↔ server
|
|
24
|
+
- 🕰️ **[Time-travel](./doc/time_variance.md)**: Query any historical state, full transaction audit trail
|
|
25
|
+
- 🔒 **[GDPR-ready](./doc/time_variance.md#data-purging)**: Complete data excision for regulatory compliance
|
|
26
|
+
- 🚀 **[Production-proven](https://gitlab.com/arbetsformedlingen/taxonomy-dev)**: Tested with billions of datoms, deployed in government services
|
|
27
|
+
|
|
28
|
+
**Distributed by design**: Datahike is part of the [replikativ](https://github.com/replikativ) ecosystem for decentralized data architectures.
|
|
29
|
+
|
|
30
|
+
## Why Datalog?
|
|
31
|
+
|
|
32
|
+
Modern applications model increasingly complex relationships—social networks, organizational hierarchies, supply chains, knowledge graphs. Traditional SQL forces you to express graph queries through explicit joins, accumulating complexity as relationships grow. Datalog uses **pattern matching over relationships**: describe what you're looking for, not how to join tables.
|
|
33
|
+
|
|
34
|
+
As systems evolve, SQL schemas accumulate join complexity. What starts as simple tables becomes nested subqueries and ad-hoc graph features. Datalog treats relationships as first-class: transitive queries, recursive rules, and multi-database joins are natural to express. The result is maintainable queries that scale with relationship complexity. See [Why Datalog?](./doc/datalog-vs-sql.md) for detailed comparisons.
|
|
35
|
+
|
|
36
|
+
**Time is fundamental to information**: Most value derives from how facts evolve over time. Datahike's immutable design treats the database as an append-only log of facts—queryable at any point in history, enabling audit trails, debugging through time-travel, and GDPR-compliant data excision. Immutability also powers Distributed Index Space: database snapshots are values that can be shared, cached, and queried without locks.
|
|
37
|
+
|
|
38
|
+
You can find [API documentation on cljdoc](https://cljdoc.org/d/org.replikativ/datahike) and articles on Datahike on our company's [blog page](https://datahike.io/notes/).
|
|
39
|
+
|
|
40
|
+
[](https://cljdoc.org/d/org.replikativ/datahike)
|
|
41
|
+
|
|
42
|
+
We have also presented Datahike at meetups, for example at:
|
|
43
|
+
|
|
44
|
+
- [2021 Bay Area Clojure meetup](https://www.youtube.com/watch?v=GG-S-xrDS5M)
|
|
45
|
+
- [2019 scicloj online meetup](https://www.youtube.com/watch?v=Hjo4TEV81sQ).
|
|
46
|
+
- [2019 Vancouver Meetup](https://www.youtube.com/watch?v=A2CZwOHOb6U).
|
|
47
|
+
- [2018 Dutch clojure meetup](https://www.youtube.com/watch?v=W6Z1mkvqp3g).
|
|
48
|
+
|
|
49
|
+
## Usage
|
|
50
|
+
|
|
51
|
+
Add to your dependencies:
|
|
52
|
+
|
|
53
|
+
[](http://clojars.org/org.replikativ/datahike)
|
|
54
|
+
|
|
55
|
+
We provide a stable API for the JVM that we extend by first providing experimental/beta features that then get merged into the API over time.
|
|
56
|
+
|
|
57
|
+
```clojure
|
|
58
|
+
(require '[datahike.api :as d])
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
;; use the filesystem as storage medium
|
|
62
|
+
(def cfg {:store {:backend :file
|
|
63
|
+
:id #uuid "550e8400-e29b-41d4-a716-446655440000"
|
|
64
|
+
:path "/tmp/example"}})
|
|
65
|
+
|
|
66
|
+
;; create a database at this place, per default configuration we enforce a strict
|
|
67
|
+
;; schema and keep all historical data
|
|
68
|
+
(d/create-database cfg)
|
|
69
|
+
|
|
70
|
+
(def conn (d/connect cfg))
|
|
71
|
+
|
|
72
|
+
;; the first transaction will be the schema we are using
|
|
73
|
+
;; you may also add this within database creation by adding :initial-tx
|
|
74
|
+
;; to the configuration
|
|
75
|
+
(d/transact conn [{:db/ident :name
|
|
76
|
+
:db/valueType :db.type/string
|
|
77
|
+
:db/cardinality :db.cardinality/one }
|
|
78
|
+
{:db/ident :age
|
|
79
|
+
:db/valueType :db.type/long
|
|
80
|
+
:db/cardinality :db.cardinality/one }])
|
|
81
|
+
|
|
82
|
+
;; let's add some data and wait for the transaction
|
|
83
|
+
(d/transact conn [{:name "Alice", :age 20 }
|
|
84
|
+
{:name "Bob", :age 30 }
|
|
85
|
+
{:name "Charlie", :age 40 }
|
|
86
|
+
{:age 15 }])
|
|
87
|
+
|
|
88
|
+
;; search the data
|
|
89
|
+
(d/q '[:find ?e ?n ?a
|
|
90
|
+
:where
|
|
91
|
+
[?e :name ?n]
|
|
92
|
+
[?e :age ?a]]
|
|
93
|
+
@conn)
|
|
94
|
+
;; => #{[3 "Alice" 20] [4 "Bob" 30] [5 "Charlie" 40]}
|
|
95
|
+
|
|
96
|
+
;; add new entity data using a hash map
|
|
97
|
+
(d/transact conn {:tx-data [{:db/id 3 :age 25}]})
|
|
98
|
+
|
|
99
|
+
;; if you want to work with queries like in
|
|
100
|
+
;; https://grishaev.me/en/datomic-query/,
|
|
101
|
+
;; you may use a hashmap
|
|
102
|
+
(d/q {:query '{:find [?e ?n ?a ]
|
|
103
|
+
:where [[?e :name ?n]
|
|
104
|
+
[?e :age ?a]]}
|
|
105
|
+
:args [@conn]})
|
|
106
|
+
;; => #{[5 "Charlie" 40] [4 "Bob" 30] [3 "Alice" 25]}
|
|
107
|
+
|
|
108
|
+
;; query the history of the data
|
|
109
|
+
(d/q '[:find ?a
|
|
110
|
+
:where
|
|
111
|
+
[?e :name "Alice"]
|
|
112
|
+
[?e :age ?a]]
|
|
113
|
+
(d/history @conn))
|
|
114
|
+
;; => #{[20] [25]}
|
|
115
|
+
|
|
116
|
+
;; you might need to release the connection for specific stores
|
|
117
|
+
(d/release conn)
|
|
118
|
+
|
|
119
|
+
;; clean up the database if it is not needed any more
|
|
120
|
+
(d/delete-database cfg)
|
|
121
|
+
```
|
|
122
|
+
|
|
123
|
+
The API namespace provides compatibility to a subset of Datomic functionality
|
|
124
|
+
and should work as a drop-in replacement on the JVM. The rest of Datahike will
|
|
125
|
+
be ported to core.async to coordinate IO in a platform-neutral manner.
|
|
126
|
+
|
|
127
|
+
## Documentation
|
|
128
|
+
|
|
129
|
+
**[📖 Complete Documentation Index](./doc/README.md)** - Organized by topic and skill level
|
|
130
|
+
|
|
131
|
+
**Quick links:**
|
|
132
|
+
- [Configuration](./doc/config.md) - Database setup and backend options
|
|
133
|
+
- [Why Datalog?](./doc/datalog-vs-sql.md) - Query comparisons and when to use Datalog
|
|
134
|
+
- [Language Bindings](./doc/README.md#language-bindings-beta) - Java, JavaScript, Python, CLI, and more (beta)
|
|
135
|
+
- [Time Variance](./doc/time_variance.md) - Time-travel queries and GDPR-compliant purging
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
For simple examples have a look at the projects in the `examples` folder.
|
|
139
|
+
|
|
140
|
+
## Example Projects
|
|
141
|
+
|
|
142
|
+
### Applications
|
|
143
|
+
|
|
144
|
+
- **[Beleg](https://github.com/replikativ/beleg)** - Invoice and CRM system with web UI, LaTeX PDF generation, and Datahike persistence. Works as both an example project demonstrating Datahike patterns and a practical solution for contractors and small businesses. Features customers, tasks, offers, and invoices. Successor to the [original Datahike invoice demo](https://www.youtube.com/watch?v=W6Z1mkvqp3g) from Dutch Clojure Meetup 2018.
|
|
145
|
+
|
|
146
|
+
## ClojureScript & JavaScript Support
|
|
147
|
+
|
|
148
|
+
Datahike has **beta ClojureScript support** for both **Node.js** (file backend) and **browsers** (IndexedDB with TieredStore for memory hierarchies).
|
|
149
|
+
|
|
150
|
+
**JavaScript API** (beta):
|
|
151
|
+
|
|
152
|
+
Install from npm:
|
|
153
|
+
```bash
|
|
154
|
+
npm install datahike@next
|
|
155
|
+
```
|
|
156
|
+
|
|
157
|
+
Example usage:
|
|
158
|
+
```javascript
|
|
159
|
+
const d = require('datahike');
|
|
160
|
+
const crypto = require('crypto');
|
|
161
|
+
|
|
162
|
+
const config = {
|
|
163
|
+
store: {
|
|
164
|
+
backend: ':memory',
|
|
165
|
+
id: crypto.randomUUID()
|
|
166
|
+
},
|
|
167
|
+
'schema-flexibility': ':read'
|
|
168
|
+
};
|
|
169
|
+
|
|
170
|
+
await d.createDatabase(config);
|
|
171
|
+
const conn = await d.connect(config);
|
|
172
|
+
await d.transact(conn, [{ name: 'Alice' }]);
|
|
173
|
+
const db = await d.db(conn);
|
|
174
|
+
const results = await d.q('[:find ?n :where [?e :name ?n]]', db);
|
|
175
|
+
console.log(results);
|
|
176
|
+
// => [['Alice']]
|
|
177
|
+
```
|
|
178
|
+
|
|
179
|
+
See [JavaScript API documentation](./doc/javascript-api.md) for details.
|
|
180
|
+
|
|
181
|
+
**Browser with real-time sync**: Combine IndexedDB storage with [Kabel](https://github.com/replikativ/kabel) WebSocket middleware for offline-capable applications.
|
|
182
|
+
|
|
183
|
+
**Native CLI tool** (`dthk`) (beta): Compiled with GraalVM native-image for instant startup. Ships with file backend support, scriptable for quick queries and automation. Available in [releases](https://github.com/replikativ/datahike/releases). See [CLI documentation](./doc/cli.md).
|
|
184
|
+
|
|
185
|
+
**Babashka pod** (beta): Native-compiled pod available in the [Babashka pod registry](https://github.com/babashka/pod-registry) for shell scripting. See [Babashka pod documentation](./doc/bb-pod.md).
|
|
186
|
+
|
|
187
|
+
**Java API** (beta): Comprehensive bindings with fluent builder pattern and automatic collection conversion. See [Java API documentation](./doc/java-api.md) for the full API guide and [examples](./examples/java/).
|
|
188
|
+
|
|
189
|
+
**libdatahike** (beta): *C/C++ native bindings* enable embedding Datahike in non-JVM applications. See [libdatahike documentation](./doc/libdatahike.md).
|
|
190
|
+
|
|
191
|
+
**Python bindings** (beta): High-level Pythonic API with automatic EDN conversion. See [Python documentation](./pydatahike/README.md).
|
|
192
|
+
|
|
193
|
+
## Production Use
|
|
194
|
+
|
|
195
|
+
### Swedish Public Employment Service
|
|
196
|
+
|
|
197
|
+
The [Swedish Public Employment Service](https://arbetsformedlingen.se) (Arbetsförmedlingen) has been using Datahike in production since 2024 to serve the [JobTech Taxonomy](https://gitlab.com/arbetsformedlingen/taxonomy-dev/backend/jobtech-taxonomy-api) (Arbetsmarknadstaxonomin) - a labour market terminology database with 40,000+ concepts representing occupations, skills, and education standards, accessed daily by thousands of case workers across Sweden.
|
|
198
|
+
|
|
199
|
+
**Technical Highlights**:
|
|
200
|
+
- **Scale**: 60+ schema attributes with multi-language support (Swedish, English)
|
|
201
|
+
- **Architecture**: Multi-backend abstraction supporting both Datomic and Datahike
|
|
202
|
+
- **API**: GraphQL interface with Apache Lucene full-text search
|
|
203
|
+
- **Compliance**: Full transaction history for regulatory audit trail
|
|
204
|
+
- **Resilience**: S3-based backup/restore for disaster recovery
|
|
205
|
+
- **Migration**: Successfully migrated from Datomic after extensive testing (U1 → I1 → Production)
|
|
206
|
+
|
|
207
|
+
**Resources**:
|
|
208
|
+
- **Source Code**: [jobtech-taxonomy-api](https://gitlab.com/arbetsformedlingen/taxonomy-dev/backend/jobtech-taxonomy-api) (2,851+ commits)
|
|
209
|
+
- **Benchmarks**: [Performance comparison suite](https://gitlab.com/arbetsformedlingen/taxonomy-dev/backend/experimental/datahike-benchmark) (Datahike vs Datomic)
|
|
210
|
+
- **Migration Story**: [Plan.md](https://gitlab.com/arbetsformedlingen/taxonomy-dev/backend/jobtech-taxonomy-api/-/blob/develop/test/datahike/Plan.md) - detailed deployment journey
|
|
211
|
+
|
|
212
|
+
This represents one of the most comprehensive open-source Datahike deployments, demonstrating production-readiness at government scale.
|
|
213
|
+
|
|
214
|
+
### Stub - Accounting for African Entrepreneurs
|
|
215
|
+
|
|
216
|
+
[Stub](https://stub.africa/) is a comprehensive accounting and invoicing platform serving 5,000+ small businesses across South Africa. Built by [Alexander Oloo](https://github.com/alekcz) with Datahike powering the core data layer.
|
|
217
|
+
|
|
218
|
+
**Features**: Invoicing with payment integration, double-entry bookkeeping, bank sync (Capitec, FNB, Absa, Nedbank), VAT tracking, inventory management, and financial reporting.
|
|
219
|
+
|
|
220
|
+
### Heidelberg University - Emotion Tracking
|
|
221
|
+
|
|
222
|
+
Heidelberg University uses Datahike in an internal emotion tracking application for psychological research (source not publicly available).
|
|
223
|
+
|
|
224
|
+
## Proximum: Vector Search for Datahike
|
|
225
|
+
|
|
226
|
+
[Proximum](https://datahike.io/proximum) is a high-performance HNSW vector index designed for Datahike's persistent data model. It brings semantic search and RAG capabilities to Datahike while maintaining immutability and full audit history.
|
|
227
|
+
|
|
228
|
+
**Key features**:
|
|
229
|
+
- Fast HNSW (Hierarchical Navigable Small World) vector search
|
|
230
|
+
- Immutable index snapshots—same git-like semantics as Datahike
|
|
231
|
+
- Persistent data structures without mutation or locks
|
|
232
|
+
- Dual-licensed: EPL-2.0 (open source) and commercial license
|
|
233
|
+
|
|
234
|
+
See [datahike.io/proximum](https://datahike.io/proximum) for details. Integration as secondary index into Datahike coming soon.
|
|
235
|
+
|
|
236
|
+
## Composable Ecosystem
|
|
237
|
+
|
|
238
|
+
Datahike is **compositional by design**—built from independent, reusable libraries that work together but can be used separately in your own systems. Each component is open source and maintained as part of the [replikativ](https://github.com/replikativ) project.
|
|
239
|
+
|
|
240
|
+
**Core libraries:**
|
|
241
|
+
- **[konserve](https://github.com/replikativ/konserve)**: Pluggable key-value store abstraction with backends for File, LMDB, S3, JDBC, Redis, IndexedDB, and more. Use it for any persistent storage needs beyond Datahike.
|
|
242
|
+
- **[kabel](https://github.com/replikativ/kabel)**: WebSocket transport with middleware support. Build real-time communication layers for any application.
|
|
243
|
+
- **[hasch](https://github.com/replikativ/hasch)**: Content-addressable hashing for Clojure data structures. Create immutable references to data.
|
|
244
|
+
- **[incognito](https://github.com/replikativ/incognito)**: Extensible serialization for custom types. Serialize any Clojure data across networks or storage.
|
|
245
|
+
- **[superv.async](https://github.com/replikativ/superv.async)**: Supervision and error handling for core.async. Build robust asynchronous systems.
|
|
246
|
+
|
|
247
|
+
**Advanced:**
|
|
248
|
+
- **[replikativ](https://github.com/replikativ/replikativ)**: CRDT-based data synchronization for eventually consistent systems. Build collaborative applications with automatic conflict resolution.
|
|
249
|
+
- **[distributed-scope](https://github.com/simm-is/distributed-scope)**: Remote function invocation with Clojure semantics across processes.
|
|
250
|
+
|
|
251
|
+
This modularity enables **custom solutions** across languages and runtimes: embed konserve in Python applications, use kabel for non-database real-time systems, or build entirely new databases on the same storage layer. Datahike demonstrates how these components work together, but you're not locked into our choices.
|
|
252
|
+
|
|
253
|
+
## Roadmap and Participation
|
|
254
|
+
|
|
255
|
+
Instead of providing a static roadmap, we work closely with the community to decide what will be worked on next in a dynamic and interactive way.
|
|
256
|
+
|
|
257
|
+
**How it works:**
|
|
258
|
+
|
|
259
|
+
Go to [GitHub Discussions](https://github.com/replikativ/datahike/discussions/categories/ideas) and upvote the _ideas_ you'd like to see in Datahike. When we have capacity for a new feature, we address the most upvoted items.
|
|
260
|
+
|
|
261
|
+
You can also propose ideas yourself—either by adding them to Discussions or by creating a pull request. Note that due to backward compatibility considerations, some PRs may take time to integrate.
|
|
262
|
+
|
|
263
|
+
|
|
264
|
+
## Commercial Support
|
|
265
|
+
|
|
266
|
+
We are happy to provide commercial support. If you are interested in a particular
|
|
267
|
+
feature, please contact us at [contact@datahike.io](mailto:contact@datahike.io).
|
|
268
|
+
|
|
269
|
+
## License
|
|
270
|
+
|
|
271
|
+
Copyright © 2014–2026 Christian Weilbach et al.
|
|
272
|
+
|
|
273
|
+
Licensed under Eclipse Public License (see [LICENSE](LICENSE)).
|
package/bb/deps.edn
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
{:paths ["src" "resources"]
|
|
2
|
+
:deps {org.babashka/spec.alpha {:git/url "https://github.com/babashka/spec.alpha"
|
|
3
|
+
:git/sha "644a7fc216e43d5da87b07471b0f87d874107d1a"}
|
|
4
|
+
;; https://github.com/babashka/tools.bbuild
|
|
5
|
+
io.github.babashka/tools.bbuild {:git/sha "73e4d0a26c65cdf1d4c7bf7e9e46e6e5f1978a37"}
|
|
6
|
+
;; https://github.com/borkdude/gh-release-artifact
|
|
7
|
+
io.github.borkdude/gh-release-artifact {:git/sha "05f8d8659e6805d513c59447ff41dc8497878462"}
|
|
8
|
+
selmer/selmer {:mvn/version "1.12.55"}
|
|
9
|
+
cheshire/cheshire {:mvn/version "5.13.0"}}}
|
|
@@ -0,0 +1,3 @@
|
|
|
1
|
+
github.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl
|
|
2
|
+
github.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg=
|
|
3
|
+
github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk=
|
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
#!/usr/bin/env bb
|
|
2
|
+
|
|
3
|
+
(require '[babashka.pods :as pods]
|
|
4
|
+
'[clojure.test :refer [run-tests deftest testing is]])
|
|
5
|
+
(import '[java.util Date])
|
|
6
|
+
|
|
7
|
+
(pods/load-pod "./dthk")
|
|
8
|
+
|
|
9
|
+
(require '[datahike.pod :as d])
|
|
10
|
+
|
|
11
|
+
(def config {:keep-history? true,
|
|
12
|
+
:search-cache-size 10000,
|
|
13
|
+
:index :datahike.index/persistent-set,
|
|
14
|
+
:store {:id #uuid "550e8400-e29b-41d4-a716-446655440763", :backend :memory :scope "test.datahike.io"},
|
|
15
|
+
:store-cache-size 1000,
|
|
16
|
+
:attribute-refs? false,
|
|
17
|
+
:writer {:backend :self},
|
|
18
|
+
:crypto-hash? false,
|
|
19
|
+
:schema-flexibility :read,
|
|
20
|
+
:branch :db})
|
|
21
|
+
|
|
22
|
+
(deftest pod-workflow
|
|
23
|
+
|
|
24
|
+
(testing "delete-database"
|
|
25
|
+
(is (= nil
|
|
26
|
+
(d/delete-database config))))
|
|
27
|
+
|
|
28
|
+
(testing "create-database"
|
|
29
|
+
(is (= {:keep-history? true
|
|
30
|
+
:search-cache-size 10000
|
|
31
|
+
:index :datahike.index/persistent-set
|
|
32
|
+
:store {:id #uuid "550e8400-e29b-41d4-a716-446655440763", :backend :memory :scope "test.datahike.io"}
|
|
33
|
+
:store-cache-size 1000
|
|
34
|
+
:attribute-refs? false
|
|
35
|
+
:writer {:backend :self}
|
|
36
|
+
:crypto-hash? false
|
|
37
|
+
:schema-flexibility :read
|
|
38
|
+
:branch :db}
|
|
39
|
+
(d/create-database config))))
|
|
40
|
+
|
|
41
|
+
(testing "database-exists?"
|
|
42
|
+
(is (= true
|
|
43
|
+
(d/database-exists? config))))
|
|
44
|
+
|
|
45
|
+
(let [conn (d/connect config)]
|
|
46
|
+
(testing "connect"
|
|
47
|
+
(is (string? conn))
|
|
48
|
+
(is (clojure.string/starts-with? conn "conn:")))
|
|
49
|
+
(testing "transact"
|
|
50
|
+
(is (= [:tempids :db-before :db-after :tx-meta :tx-data]
|
|
51
|
+
(keys (d/transact conn [{:name "Alice", :age 20}
|
|
52
|
+
{:name "Bob", :age 30}
|
|
53
|
+
{:name "Charlie", :age 40}
|
|
54
|
+
{:age 15}])))))
|
|
55
|
+
; (testing "transact with bad arg"
|
|
56
|
+
; (is (thrown? clojure.lang.ExceptionInfo
|
|
57
|
+
; (keys (d/transact
|
|
58
|
+
; "foo"
|
|
59
|
+
; [{:name "Alice", :age 20}
|
|
60
|
+
; {:name "Bob", :age 30}
|
|
61
|
+
; {:name "Charlie", :age 40}
|
|
62
|
+
; {:age 15}])))))
|
|
63
|
+
(testing "with-db"
|
|
64
|
+
(is (= #{[2 "Bob" 30] [1 "Alice" 20] [3 "Charlie" 40]}
|
|
65
|
+
(d/with-db [db (d/db conn)]
|
|
66
|
+
(d/q {:query '{:find [?e ?n ?a]
|
|
67
|
+
:where
|
|
68
|
+
[[?e :name ?n]
|
|
69
|
+
[?e :age ?a]]}
|
|
70
|
+
:args [(d/db conn)]})))))
|
|
71
|
+
(testing "release-db"
|
|
72
|
+
(let [db (d/db conn)]
|
|
73
|
+
(is (= {}
|
|
74
|
+
(d/release-db db)))))
|
|
75
|
+
(testing "q"
|
|
76
|
+
(is (= #{[2 "Bob" 30] [1 "Alice" 20] [3 "Charlie" 40]}
|
|
77
|
+
(d/q {:query '{:find [?e ?n ?a]
|
|
78
|
+
:where
|
|
79
|
+
[[?e :name ?n]
|
|
80
|
+
[?e :age ?a]]}
|
|
81
|
+
:args [(d/db conn)]})
|
|
82
|
+
(d/q '[:find ?e ?n ?a
|
|
83
|
+
:where
|
|
84
|
+
[?e :name ?n]
|
|
85
|
+
[?e :age ?a]]
|
|
86
|
+
(d/db conn)))))
|
|
87
|
+
(let [timestamp (Date.)]
|
|
88
|
+
(Thread/sleep 1)
|
|
89
|
+
(d/transact conn {:tx-data [{:db/id 3 :age 25}]})
|
|
90
|
+
(d/transact conn [{:name "FOO" :age "BAR"}])
|
|
91
|
+
(testing "pull"
|
|
92
|
+
(is (= {:db/id 1, :age 20, :name "Alice"}
|
|
93
|
+
(d/pull (d/db conn) '[*] 1))))
|
|
94
|
+
(testing "pull-many"
|
|
95
|
+
(is (= [{:db/id 1, :age 20, :name "Alice"}
|
|
96
|
+
{:db/id 2, :age 30, :name "Bob"}
|
|
97
|
+
{:db/id 3, :age 25, :name "Charlie"}]
|
|
98
|
+
(d/pull-many (d/db conn) '[*] [1 2 3]))))
|
|
99
|
+
(testing "metrics"
|
|
100
|
+
(is (= {:per-attr-counts {:age 5, :name 4, :db/txInstant 3}
|
|
101
|
+
:per-entity-counts {1 2, 2 2, 3 2, 4 1, 5 2, 536870913 1, 536870914 1, 536870915 1}
|
|
102
|
+
:count 12, :avet-count 0, :temporal-count 11, :temporal-avet-count 0}
|
|
103
|
+
(d/metrics (d/db conn)))))
|
|
104
|
+
(testing "as-of tx-id"
|
|
105
|
+
(is (= #{[3 "Charlie" 25] [2 "Bob" 30] [5 "FOO" "BAR"] [1 "Alice" 20]}
|
|
106
|
+
(d/q '[:find ?e ?n ?a
|
|
107
|
+
:where
|
|
108
|
+
[?e :name ?n]
|
|
109
|
+
[?e :age ?a]]
|
|
110
|
+
(d/as-of (d/db conn) 536870916)))))
|
|
111
|
+
(testing "as-of timestamp"
|
|
112
|
+
(is (= #{[2 "Bob" 30] [1 "Alice" 20] [3 "Charlie" 40]}
|
|
113
|
+
(d/q '[:find ?e ?n ?a
|
|
114
|
+
:where
|
|
115
|
+
[?e :name ?n]
|
|
116
|
+
[?e :age ?a]]
|
|
117
|
+
(d/as-of (d/db conn) timestamp)))))
|
|
118
|
+
(testing "since tx-id"
|
|
119
|
+
(is (= #{[5 "FOO" "BAR"]}
|
|
120
|
+
(d/q '[:find ?e ?n ?a
|
|
121
|
+
:where
|
|
122
|
+
[?e :name ?n]
|
|
123
|
+
[?e :age ?a]]
|
|
124
|
+
(d/since (d/db conn) 536870914)))))
|
|
125
|
+
(testing "since timestamp"
|
|
126
|
+
(is (= #{[5 "FOO" "BAR"]}
|
|
127
|
+
(d/q '[:find ?e ?n ?a
|
|
128
|
+
:where
|
|
129
|
+
[?e :name ?n]
|
|
130
|
+
[?e :age ?a]]
|
|
131
|
+
(d/since (d/db conn) timestamp)))))
|
|
132
|
+
(testing "history"
|
|
133
|
+
(is (= #{[3 "Charlie" 25] [2 "Bob" 30] [5 "FOO" "BAR"] [1 "Alice" 20] [3 "Charlie" 40]}
|
|
134
|
+
(d/q '[:find ?e ?n ?a
|
|
135
|
+
:where
|
|
136
|
+
[?e :name ?n]
|
|
137
|
+
[?e :age ?a]]
|
|
138
|
+
(d/history (d/db conn))))))
|
|
139
|
+
(testing "datoms"
|
|
140
|
+
(is (= '((1 :age 20 536870913 true))
|
|
141
|
+
(d/datoms (d/db conn) :eavt 1 :age 20)))
|
|
142
|
+
(is (= '((2 :age 30 536870913 true) (2 :name "Bob" 536870913 true))
|
|
143
|
+
(d/datoms (d/db conn) {:index :eavt :components [2]}))))
|
|
144
|
+
(testing "schema"
|
|
145
|
+
(d/transact conn {:tx-data [{:db/ident :name :db/valueType :db.type/string :db/unique :db.unique/identity
|
|
146
|
+
:db/index true :db/cardinality :db.cardinality/one}
|
|
147
|
+
{:db/ident :age :db/valueType :db.type/long :db/cardinality :db.cardinality/one}]})
|
|
148
|
+
(is (= {:name {:db/ident :name, :db/valueType :db.type/string, :db/unique :db.unique/identity
|
|
149
|
+
:db/index true, :db/cardinality :db.cardinality/one, :db/id 6}
|
|
150
|
+
:age {:db/ident :age, :db/valueType :db.type/long, :db/cardinality :db.cardinality/one, :db/id 7}}
|
|
151
|
+
(d/schema (d/db conn))))
|
|
152
|
+
(testing "entity"
|
|
153
|
+
(is (= {:age "BAR" :name "FOO"}
|
|
154
|
+
(d/entity (d/db conn) 5))))))))
|
|
155
|
+
|
|
156
|
+
(defn -main [& _args]
|
|
157
|
+
(let [{:keys [fail error]} (run-tests)]
|
|
158
|
+
(when (and fail error (pos? (+ fail error)))
|
|
159
|
+
(System/exit 1))))
|
|
160
|
+
|
|
161
|
+
(when (= *file* (System/getProperty "babashka.file"))
|
|
162
|
+
(apply -main *command-line-args*))
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
|
|
3
|
+
set -o errexit
|
|
4
|
+
set -o pipefail
|
|
5
|
+
set -o nounset
|
|
6
|
+
|
|
7
|
+
export LD_LIBRARY_PATH=./libdatahike/target
|
|
8
|
+
export DYLD_LIBRARY_PATH=./libdatahike/target
|
|
9
|
+
|
|
10
|
+
rm -rf "/tmp/libdatahike-test"
|
|
11
|
+
./libdatahike/compile-cpp
|
|
12
|
+
./libdatahike/target/test_cpp
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
#!/bin/bash
|
|
2
|
+
|
|
3
|
+
set -o errexit
|
|
4
|
+
set -o pipefail
|
|
5
|
+
|
|
6
|
+
TMPSTORE=/tmp/dh-test-store
|
|
7
|
+
CONFIG=bb/resources/native-image-tests/testconfig.edn
|
|
8
|
+
ATTR_REF_CONFIG=bb/resources/native-image-tests/testconfig.attr-refs.edn
|
|
9
|
+
|
|
10
|
+
trap "rm -rf $TMPSTORE" EXIT
|
|
11
|
+
|
|
12
|
+
./dthk delete-database edn:$ATTR_REF_CONFIG
|
|
13
|
+
./dthk create-database edn:$ATTR_REF_CONFIG
|
|
14
|
+
|
|
15
|
+
./dthk database-exists edn:$ATTR_REF_CONFIG
|
|
16
|
+
|
|
17
|
+
# test that warnings etc. get logged to stderr
|
|
18
|
+
LOG_OUTPUT="$(./dthk query '[:find ?e . :where [?e :nonexistent _]]' db:$ATTR_REF_CONFIG 2>&1 >/dev/null | grep ':nonexistent has not been found')"
|
|
19
|
+
if [ -z "$LOG_OUTPUT" ]
|
|
20
|
+
then
|
|
21
|
+
echo "Exception: binary did not log to stderr"
|
|
22
|
+
exit 1
|
|
23
|
+
fi
|
|
24
|
+
|
|
25
|
+
./dthk delete-database edn:$CONFIG
|
|
26
|
+
./dthk create-database edn:$CONFIG
|
|
27
|
+
|
|
28
|
+
./dthk database-exists edn:$CONFIG
|
|
29
|
+
|
|
30
|
+
# Add test data with tx transact (benchmark command no longer available)
|
|
31
|
+
for i in {1..100}; do
|
|
32
|
+
./dthk transact conn:$CONFIG "[[:db/add -$i :name \"User-$i\"]]"
|
|
33
|
+
done
|
|
34
|
+
./dthk transact conn:$CONFIG '[[:db/add -1 :name "Judea"]]'
|
|
35
|
+
QUERY_OUT=`./dthk query '[:find (count ?e) . :where [?e :name _]]' db:$CONFIG`
|
|
36
|
+
|
|
37
|
+
if [ $QUERY_OUT -eq 101 ]
|
|
38
|
+
then
|
|
39
|
+
echo "Test successful."
|
|
40
|
+
else
|
|
41
|
+
echo "Exception: Query did not return correct value."
|
|
42
|
+
exit 1
|
|
43
|
+
fi
|
|
44
|
+
|
|
45
|
+
# test history input parsing
|
|
46
|
+
./dthk query '[:find (count ?e) . :where [?e :name _]]' history:$CONFIG
|
|
47
|
+
./dthk query '[:find (count ?e) . :where [?e :name _]]' since:0:$CONFIG
|
|
48
|
+
./dthk query '[:find (count ?e) . :where [?e :name _]]' asof:0:$CONFIG
|
|
49
|
+
|
|
50
|
+
# other calls
|
|
51
|
+
./dthk pull db:$CONFIG "[:db/id, :name]" "1"
|
|
52
|
+
./dthk pull-many db:$CONFIG "[:db/id, :name]" "[1]"
|
|
53
|
+
./dthk entity db:$CONFIG "1"
|
|
54
|
+
./dthk datoms db:$CONFIG "{:index :eavt :components [1]}"
|
|
55
|
+
./dthk schema db:$CONFIG
|
|
56
|
+
./dthk reverse-schema db:$CONFIG
|
|
57
|
+
./dthk metrics db:$CONFIG
|
|
58
|
+
|
|
59
|
+
# test serialization
|
|
60
|
+
./dthk query '[:find ?e . :where [?e :name ?n]]' db:$CONFIG --format cbor >> /tmp/test
|
|
61
|
+
./dthk query '[:find ?i :in $ ?i . :where [?e :name ?n]]' db:$CONFIG cbor:/tmp/test # => 1
|
|
62
|
+
|
|
63
|
+
# test arbitrary :in[puts] as positional args
|
|
64
|
+
QUERY_OUT=`./dthk query '[:find (pull ?e [*]) . :in $ ?name :where [?e :name ?name]]' db:$CONFIG '"Judea"'`
|
|
65
|
+
if [ "$QUERY_OUT" = '{:db/id 101, :name "Judea"}' ]
|
|
66
|
+
then
|
|
67
|
+
echo "Positional input test successful."
|
|
68
|
+
else
|
|
69
|
+
echo "Exception: Query did not return correct value."
|
|
70
|
+
exit 1
|
|
71
|
+
fi
|
|
72
|
+
|
|
73
|
+
./dthk delete-database edn:$CONFIG
|
|
74
|
+
./dthk delete-database edn:$ATTR_REF_CONFIG
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
|
|
3
|
+
set -o errexit
|
|
4
|
+
set -o pipefail
|
|
5
|
+
set -o nounset
|
|
6
|
+
|
|
7
|
+
echo "Setting up Python test environment..."
|
|
8
|
+
|
|
9
|
+
# Set library paths so Python can find libdatahike.so
|
|
10
|
+
export LD_LIBRARY_PATH=./libdatahike/target
|
|
11
|
+
export DYLD_LIBRARY_PATH=./libdatahike/target
|
|
12
|
+
|
|
13
|
+
# Install pytest (use pip3 explicitly to avoid Python 2.7)
|
|
14
|
+
echo "Installing pytest..."
|
|
15
|
+
pip3 install --user pytest
|
|
16
|
+
|
|
17
|
+
# Run Python tests using python3 -m pytest to avoid PATH issues
|
|
18
|
+
echo "Running Python tests..."
|
|
19
|
+
cd pydatahike
|
|
20
|
+
python3 -m pytest -v tests/
|
|
21
|
+
|
|
22
|
+
echo "Python tests completed successfully!"
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
eclipse.preferences.version=1
|
|
2
|
+
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
|
|
3
|
+
org.eclipse.jdt.core.compiler.compliance=1.8
|
|
4
|
+
org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled
|
|
5
|
+
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
|
|
6
|
+
org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore
|
|
7
|
+
org.eclipse.jdt.core.compiler.processAnnotations=disabled
|
|
8
|
+
org.eclipse.jdt.core.compiler.release=disabled
|
|
9
|
+
org.eclipse.jdt.core.compiler.source=1.8
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
<?xml version="1.0" encoding="UTF-8"?>
|
|
2
|
+
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
|
3
|
+
<version>0.0.0</version>
|
|
4
|
+
<modelVersion>4.0.0</modelVersion>
|
|
5
|
+
<groupId>io.replikativ</groupId>
|
|
6
|
+
<artifactId>datahike</artifactId>
|
|
7
|
+
<packaging>jar</packaging>
|
|
8
|
+
<name>datahike</name>
|
|
9
|
+
<description>A durable datalog implementation adaptable for distribution.</description>
|
|
10
|
+
<url>https://datahike.io</url>
|
|
11
|
+
<licenses>
|
|
12
|
+
<license>
|
|
13
|
+
<name>Eclipse</name>
|
|
14
|
+
<url>http://www.eclipse.org/legal/epl-v10.html</url>
|
|
15
|
+
</license>
|
|
16
|
+
</licenses>
|
|
17
|
+
<scm>
|
|
18
|
+
<connection>scm:git:git@github.com:replikativ/datahike.git</connection>
|
|
19
|
+
<developerConnection>scm:git:git@github.com/replikativ/datahike.git</developerConnection>
|
|
20
|
+
<url>https://github.com/replikativ/datahike</url>
|
|
21
|
+
</scm>
|
|
22
|
+
</project>
|