datahike-browser-tests 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.circleci/config.yml +405 -0
- package/.circleci/scripts/gen_ci.clj +194 -0
- package/.cirrus.yml +60 -0
- package/.clj-kondo/babashka/sci/config.edn +1 -0
- package/.clj-kondo/babashka/sci/sci/core.clj +9 -0
- package/.clj-kondo/config.edn +95 -0
- package/.dir-locals.el +2 -0
- package/.github/FUNDING.yml +3 -0
- package/.github/ISSUE_TEMPLATE/1-bug-report.yml +68 -0
- package/.github/ISSUE_TEMPLATE/2-feature-request.yml +28 -0
- package/.github/ISSUE_TEMPLATE/config.yml +6 -0
- package/.github/pull_request_template.md +24 -0
- package/.github/workflows/native-image.yml +84 -0
- package/LICENSE +203 -0
- package/README.md +273 -0
- package/bb/deps.edn +9 -0
- package/bb/resources/github-fingerprints +3 -0
- package/bb/resources/native-image-tests/run-bb-pod-tests.clj +162 -0
- package/bb/resources/native-image-tests/run-libdatahike-tests +12 -0
- package/bb/resources/native-image-tests/run-native-image-tests +74 -0
- package/bb/resources/native-image-tests/run-python-tests +22 -0
- package/bb/resources/native-image-tests/testconfig.attr-refs.edn +6 -0
- package/bb/resources/native-image-tests/testconfig.edn +5 -0
- package/bb/resources/template/.settings/org.eclipse.jdt.apt.core.prefs +2 -0
- package/bb/resources/template/.settings/org.eclipse.jdt.core.prefs +9 -0
- package/bb/resources/template/.settings/org.eclipse.m2e.core.prefs +4 -0
- package/bb/resources/template/pom.xml +22 -0
- package/bb/src/tools/build.clj +132 -0
- package/bb/src/tools/clj_kondo.clj +32 -0
- package/bb/src/tools/deploy.clj +26 -0
- package/bb/src/tools/examples.clj +19 -0
- package/bb/src/tools/npm.clj +100 -0
- package/bb/src/tools/python.clj +14 -0
- package/bb/src/tools/release.clj +94 -0
- package/bb/src/tools/test.clj +148 -0
- package/bb/src/tools/version.clj +47 -0
- package/bb.edn +269 -0
- package/benchmark/src/benchmark/cli.clj +195 -0
- package/benchmark/src/benchmark/compare.clj +157 -0
- package/benchmark/src/benchmark/config.clj +316 -0
- package/benchmark/src/benchmark/measure.clj +187 -0
- package/benchmark/src/benchmark/store.clj +190 -0
- package/benchmark/test/benchmark/measure_test.clj +156 -0
- package/build.clj +30 -0
- package/config.edn +49 -0
- package/deps.edn +138 -0
- package/dev/sandbox.clj +82 -0
- package/dev/sandbox.cljs +127 -0
- package/dev/sandbox_benchmarks.clj +27 -0
- package/dev/sandbox_client.clj +87 -0
- package/dev/sandbox_transact_bench.clj +109 -0
- package/dev/user.clj +79 -0
- package/doc/README.md +96 -0
- package/doc/adl/README.md +6 -0
- package/doc/adl/adr-000-adr.org +28 -0
- package/doc/adl/adr-001-attribute-references.org +15 -0
- package/doc/adl/adr-002-build-tooling.org +54 -0
- package/doc/adl/adr-003-db-meta-data.md +52 -0
- package/doc/adl/adr-004-github-flow.md +40 -0
- package/doc/adl/adr-XYZ-template.md +30 -0
- package/doc/adl/index.org +3 -0
- package/doc/assets/datahike-logo.svg +3 -0
- package/doc/assets/datahiking-invoice.org +85 -0
- package/doc/assets/hhtree2.png +0 -0
- package/doc/assets/network_topology.svg +624 -0
- package/doc/assets/perf.png +0 -0
- package/doc/assets/schema_mindmap.mm +132 -0
- package/doc/assets/schema_mindmap.svg +970 -0
- package/doc/assets/temporal_index.mm +74 -0
- package/doc/backend-development.md +78 -0
- package/doc/bb-pod.md +89 -0
- package/doc/benchmarking.md +360 -0
- package/doc/bindings/edn-conversion.md +383 -0
- package/doc/cli.md +162 -0
- package/doc/cljdoc.edn +27 -0
- package/doc/cljs-support.md +133 -0
- package/doc/config.md +406 -0
- package/doc/contributing.md +114 -0
- package/doc/datalog-vs-sql.md +210 -0
- package/doc/datomic_differences.md +109 -0
- package/doc/development/pull-api-ns.md +186 -0
- package/doc/development/pull-frame-state-diagram.jpg +0 -0
- package/doc/distributed.md +566 -0
- package/doc/entity_spec.md +92 -0
- package/doc/gc.md +273 -0
- package/doc/java-api.md +808 -0
- package/doc/javascript-api.md +421 -0
- package/doc/libdatahike.md +86 -0
- package/doc/logging_and_error_handling.md +43 -0
- package/doc/norms.md +66 -0
- package/doc/schema-migration.md +85 -0
- package/doc/schema.md +287 -0
- package/doc/storage-backends.md +363 -0
- package/doc/store-id-refactoring.md +596 -0
- package/doc/time_variance.md +325 -0
- package/doc/unstructured.md +167 -0
- package/doc/versioning.md +261 -0
- package/examples/basic/README.md +19 -0
- package/examples/basic/deps.edn +6 -0
- package/examples/basic/docker-compose.yml +13 -0
- package/examples/basic/src/examples/core.clj +60 -0
- package/examples/basic/src/examples/schema.clj +155 -0
- package/examples/basic/src/examples/store.clj +60 -0
- package/examples/basic/src/examples/time_travel.clj +185 -0
- package/examples/java/.settings/org.eclipse.core.resources.prefs +3 -0
- package/examples/java/.settings/org.eclipse.jdt.apt.core.prefs +2 -0
- package/examples/java/.settings/org.eclipse.jdt.core.prefs +9 -0
- package/examples/java/.settings/org.eclipse.m2e.core.prefs +4 -0
- package/examples/java/README.md +162 -0
- package/examples/java/pom.xml +62 -0
- package/examples/java/src/main/java/examples/QuickStart.java +115 -0
- package/examples/java/src/main/java/examples/SchemaExample.java +148 -0
- package/examples/java/src/main/java/examples/TimeTravelExample.java +121 -0
- package/flake.lock +27 -0
- package/flake.nix +27 -0
- package/http-server/datahike/http/middleware.clj +75 -0
- package/http-server/datahike/http/server.clj +269 -0
- package/java/src/datahike/java/Database.java +274 -0
- package/java/src/datahike/java/Datahike.java +281 -0
- package/java/src/datahike/java/DatahikeGeneratedTest.java +349 -0
- package/java/src/datahike/java/DatahikeTest.java +370 -0
- package/java/src/datahike/java/EDN.java +170 -0
- package/java/src/datahike/java/IEntity.java +11 -0
- package/java/src/datahike/java/Keywords.java +161 -0
- package/java/src/datahike/java/SchemaFlexibility.java +52 -0
- package/java/src/datahike/java/Util.java +219 -0
- package/karma.conf.js +19 -0
- package/libdatahike/compile-cpp +7 -0
- package/libdatahike/src/datahike/impl/LibDatahikeBase.java +203 -0
- package/libdatahike/src/datahike/impl/libdatahike.clj +59 -0
- package/libdatahike/src/test_cpp.cpp +61 -0
- package/npm-package/PUBLISHING.md +140 -0
- package/npm-package/README.md +226 -0
- package/npm-package/package.template.json +34 -0
- package/npm-package/test-isomorphic.ts +281 -0
- package/npm-package/test.js +557 -0
- package/npm-package/typescript-test.ts +70 -0
- package/package.json +16 -0
- package/pydatahike/README.md +569 -0
- package/pydatahike/pyproject.toml +91 -0
- package/pydatahike/setup.py +42 -0
- package/pydatahike/src/datahike/__init__.py +134 -0
- package/pydatahike/src/datahike/_native.py +250 -0
- package/pydatahike/src/datahike/_version.py +2 -0
- package/pydatahike/src/datahike/database.py +722 -0
- package/pydatahike/src/datahike/edn.py +311 -0
- package/pydatahike/src/datahike/py.typed +0 -0
- package/pydatahike/tests/conftest.py +17 -0
- package/pydatahike/tests/test_basic.py +170 -0
- package/pydatahike/tests/test_database.py +51 -0
- package/pydatahike/tests/test_edn_conversion.py +299 -0
- package/pydatahike/tests/test_query.py +99 -0
- package/pydatahike/tests/test_schema.py +55 -0
- package/resources/clj-kondo.exports/io.replikativ/datahike/config.edn +5 -0
- package/resources/example_server.edn +4 -0
- package/shadow-cljs.edn +56 -0
- package/src/data_readers.clj +7 -0
- package/src/datahike/api/impl.cljc +176 -0
- package/src/datahike/api/specification.cljc +633 -0
- package/src/datahike/api/types.cljc +261 -0
- package/src/datahike/api.cljc +41 -0
- package/src/datahike/array.cljc +99 -0
- package/src/datahike/cli.clj +166 -0
- package/src/datahike/cljs.cljs +6 -0
- package/src/datahike/codegen/cli.clj +406 -0
- package/src/datahike/codegen/clj_kondo.clj +291 -0
- package/src/datahike/codegen/java.clj +403 -0
- package/src/datahike/codegen/naming.cljc +33 -0
- package/src/datahike/codegen/native.clj +559 -0
- package/src/datahike/codegen/pod.clj +488 -0
- package/src/datahike/codegen/python.clj +838 -0
- package/src/datahike/codegen/report.clj +55 -0
- package/src/datahike/codegen/typescript.clj +262 -0
- package/src/datahike/codegen/validation.clj +145 -0
- package/src/datahike/config.cljc +294 -0
- package/src/datahike/connections.cljc +16 -0
- package/src/datahike/connector.cljc +265 -0
- package/src/datahike/constants.cljc +142 -0
- package/src/datahike/core.cljc +297 -0
- package/src/datahike/datom.cljc +459 -0
- package/src/datahike/db/interface.cljc +119 -0
- package/src/datahike/db/search.cljc +305 -0
- package/src/datahike/db/transaction.cljc +937 -0
- package/src/datahike/db/utils.cljc +338 -0
- package/src/datahike/db.cljc +956 -0
- package/src/datahike/experimental/unstructured.cljc +126 -0
- package/src/datahike/experimental/versioning.cljc +172 -0
- package/src/datahike/externs.js +31 -0
- package/src/datahike/gc.cljc +69 -0
- package/src/datahike/http/client.clj +188 -0
- package/src/datahike/http/writer.clj +79 -0
- package/src/datahike/impl/entity.cljc +218 -0
- package/src/datahike/index/interface.cljc +93 -0
- package/src/datahike/index/persistent_set.cljc +469 -0
- package/src/datahike/index/utils.cljc +44 -0
- package/src/datahike/index.cljc +32 -0
- package/src/datahike/js/api.cljs +172 -0
- package/src/datahike/js/api_macros.clj +22 -0
- package/src/datahike/js.cljs +163 -0
- package/src/datahike/json.cljc +209 -0
- package/src/datahike/lru.cljc +146 -0
- package/src/datahike/migrate.clj +39 -0
- package/src/datahike/norm/norm.clj +245 -0
- package/src/datahike/online_gc.cljc +252 -0
- package/src/datahike/pod.clj +155 -0
- package/src/datahike/pull_api.cljc +325 -0
- package/src/datahike/query.cljc +1945 -0
- package/src/datahike/query_stats.cljc +88 -0
- package/src/datahike/readers.cljc +62 -0
- package/src/datahike/remote.cljc +218 -0
- package/src/datahike/schema.cljc +228 -0
- package/src/datahike/schema_cache.cljc +42 -0
- package/src/datahike/spec.cljc +101 -0
- package/src/datahike/store.cljc +80 -0
- package/src/datahike/tools.cljc +308 -0
- package/src/datahike/transit.cljc +80 -0
- package/src/datahike/writer.cljc +239 -0
- package/src/datahike/writing.cljc +362 -0
- package/src/deps.cljs +1 -0
- package/src-hitchhiker-tree/datahike/index/hitchhiker_tree/insert.cljc +76 -0
- package/src-hitchhiker-tree/datahike/index/hitchhiker_tree/upsert.cljc +128 -0
- package/src-hitchhiker-tree/datahike/index/hitchhiker_tree.cljc +213 -0
- package/test/datahike/backward_compatibility_test/src/backward_test.clj +37 -0
- package/test/datahike/integration_test/config_record_file_test.clj +14 -0
- package/test/datahike/integration_test/config_record_test.clj +14 -0
- package/test/datahike/integration_test/depr_config_uri_test.clj +15 -0
- package/test/datahike/integration_test/return_map_test.clj +62 -0
- package/test/datahike/integration_test.cljc +67 -0
- package/test/datahike/norm/norm_test.clj +124 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/001-a1-example.edn +5 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/002-a2-example.edn +5 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/003-tx-fn-test.edn +1 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/004-tx-data-and-tx-fn-test.edn +5 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/01-transact-basic-characters.edn +2 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/02 add occupation.edn +5 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/checksums.edn +12 -0
- package/test/datahike/norm/resources/simple-test/001-a1-example.edn +5 -0
- package/test/datahike/norm/resources/simple-test/002-a2-example.edn +5 -0
- package/test/datahike/norm/resources/simple-test/checksums.edn +4 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/001-a1-example.edn +5 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/002-a2-example.edn +5 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/003-tx-fn-test.edn +1 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/checksums.edn +6 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/second/004-tx-data-and-tx-fn-test.edn +5 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/second/checksums.edn +2 -0
- package/test/datahike/norm/resources/tx-fn-test/first/001-a1-example.edn +5 -0
- package/test/datahike/norm/resources/tx-fn-test/first/002-a2-example.edn +5 -0
- package/test/datahike/norm/resources/tx-fn-test/first/checksums.edn +4 -0
- package/test/datahike/norm/resources/tx-fn-test/second/003-tx-fn-test.edn +1 -0
- package/test/datahike/norm/resources/tx-fn-test/second/checksums.edn +2 -0
- package/test/datahike/test/api_test.cljc +895 -0
- package/test/datahike/test/array_test.cljc +40 -0
- package/test/datahike/test/attribute_refs/datoms_test.cljc +140 -0
- package/test/datahike/test/attribute_refs/db_test.cljc +42 -0
- package/test/datahike/test/attribute_refs/differences_test.cljc +515 -0
- package/test/datahike/test/attribute_refs/entity_test.cljc +89 -0
- package/test/datahike/test/attribute_refs/pull_api_test.cljc +320 -0
- package/test/datahike/test/attribute_refs/query_find_specs_test.cljc +59 -0
- package/test/datahike/test/attribute_refs/query_fns_test.cljc +130 -0
- package/test/datahike/test/attribute_refs/query_interop_test.cljc +47 -0
- package/test/datahike/test/attribute_refs/query_not_test.cljc +193 -0
- package/test/datahike/test/attribute_refs/query_or_test.cljc +137 -0
- package/test/datahike/test/attribute_refs/query_pull_test.cljc +156 -0
- package/test/datahike/test/attribute_refs/query_rules_test.cljc +176 -0
- package/test/datahike/test/attribute_refs/query_test.cljc +241 -0
- package/test/datahike/test/attribute_refs/temporal_search.cljc +22 -0
- package/test/datahike/test/attribute_refs/transact_test.cljc +220 -0
- package/test/datahike/test/attribute_refs/utils.cljc +128 -0
- package/test/datahike/test/cache_test.cljc +38 -0
- package/test/datahike/test/components_test.cljc +92 -0
- package/test/datahike/test/config_test.cljc +158 -0
- package/test/datahike/test/core_test.cljc +105 -0
- package/test/datahike/test/datom_test.cljc +44 -0
- package/test/datahike/test/db_test.cljc +54 -0
- package/test/datahike/test/entity_spec_test.cljc +159 -0
- package/test/datahike/test/entity_test.cljc +103 -0
- package/test/datahike/test/explode_test.cljc +143 -0
- package/test/datahike/test/filter_test.cljc +75 -0
- package/test/datahike/test/gc_test.cljc +159 -0
- package/test/datahike/test/http/server_test.clj +192 -0
- package/test/datahike/test/http/writer_test.clj +86 -0
- package/test/datahike/test/ident_test.cljc +32 -0
- package/test/datahike/test/index_test.cljc +345 -0
- package/test/datahike/test/insert.cljc +125 -0
- package/test/datahike/test/java_bindings_test.clj +6 -0
- package/test/datahike/test/listen_test.cljc +41 -0
- package/test/datahike/test/lookup_refs_test.cljc +266 -0
- package/test/datahike/test/lru_test.cljc +27 -0
- package/test/datahike/test/migrate_test.clj +297 -0
- package/test/datahike/test/model/core.cljc +376 -0
- package/test/datahike/test/model/invariant.cljc +142 -0
- package/test/datahike/test/model/rng.cljc +82 -0
- package/test/datahike/test/model_test.clj +217 -0
- package/test/datahike/test/nodejs_test.cljs +262 -0
- package/test/datahike/test/online_gc_test.cljc +475 -0
- package/test/datahike/test/pod_test.clj +369 -0
- package/test/datahike/test/pull_api_test.cljc +474 -0
- package/test/datahike/test/purge_test.cljc +144 -0
- package/test/datahike/test/query_aggregates_test.cljc +101 -0
- package/test/datahike/test/query_find_specs_test.cljc +52 -0
- package/test/datahike/test/query_fns_test.cljc +523 -0
- package/test/datahike/test/query_interop_test.cljc +47 -0
- package/test/datahike/test/query_not_test.cljc +189 -0
- package/test/datahike/test/query_or_test.cljc +158 -0
- package/test/datahike/test/query_pull_test.cljc +147 -0
- package/test/datahike/test/query_rules_test.cljc +248 -0
- package/test/datahike/test/query_stats_test.cljc +218 -0
- package/test/datahike/test/query_test.cljc +984 -0
- package/test/datahike/test/schema_test.cljc +424 -0
- package/test/datahike/test/specification_test.cljc +30 -0
- package/test/datahike/test/store_test.cljc +78 -0
- package/test/datahike/test/stress_test.cljc +57 -0
- package/test/datahike/test/time_variance_test.cljc +518 -0
- package/test/datahike/test/tools_test.clj +134 -0
- package/test/datahike/test/transact_test.cljc +518 -0
- package/test/datahike/test/tuples_test.cljc +564 -0
- package/test/datahike/test/unstructured_test.cljc +291 -0
- package/test/datahike/test/upsert_impl_test.cljc +205 -0
- package/test/datahike/test/upsert_test.cljc +363 -0
- package/test/datahike/test/utils.cljc +110 -0
- package/test/datahike/test/validation_test.cljc +48 -0
- package/test/datahike/test/versioning_test.cljc +56 -0
- package/test/datahike/test.cljc +66 -0
- package/tests.edn +24 -0
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
(ns examples.time-travel
|
|
2
|
+
(:require [datahike.api :as d]))
|
|
3
|
+
|
|
4
|
+
;; define schema
|
|
5
|
+
(def schema [{:db/ident :name
|
|
6
|
+
:db/valueType :db.type/string
|
|
7
|
+
:db/unique :db.unique/identity
|
|
8
|
+
:db/index true
|
|
9
|
+
:db/cardinality :db.cardinality/one}
|
|
10
|
+
{:db/ident :age
|
|
11
|
+
:db/valueType :db.type/long
|
|
12
|
+
:db/cardinality :db.cardinality/one}])
|
|
13
|
+
|
|
14
|
+
;; define base configuration we can connect to
|
|
15
|
+
(def cfg {:store {:backend :mem
|
|
16
|
+
:id "time-travel"}
|
|
17
|
+
:keep-history? true})
|
|
18
|
+
|
|
19
|
+
;; cleanup any previous data
|
|
20
|
+
(d/delete-database cfg)
|
|
21
|
+
|
|
22
|
+
;; create the database with default configuration and above schema
|
|
23
|
+
(d/create-database cfg)
|
|
24
|
+
|
|
25
|
+
;; connect to the database
|
|
26
|
+
(def conn (d/connect cfg))
|
|
27
|
+
|
|
28
|
+
;; add the schema
|
|
29
|
+
(d/transact conn schema)
|
|
30
|
+
|
|
31
|
+
;; add age and name data
|
|
32
|
+
(d/transact conn [{:name "Alice" :age 25} {:name "Bob" :age 30}])
|
|
33
|
+
|
|
34
|
+
;; let's find name and age of all data
|
|
35
|
+
(def query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]])
|
|
36
|
+
|
|
37
|
+
;; search current data without any new data
|
|
38
|
+
(d/q query @conn);; => #{["Alice" 25] ["Bob" 30]}
|
|
39
|
+
|
|
40
|
+
(def first-date (java.util.Date.))
|
|
41
|
+
|
|
42
|
+
;; let's change something
|
|
43
|
+
(d/transact conn [{:db/id [:name "Alice"] :age 30}])
|
|
44
|
+
|
|
45
|
+
;; search for current data of Alice
|
|
46
|
+
(d/q query @conn);; => #{["Alice" 30] ["Bob" 30]}
|
|
47
|
+
|
|
48
|
+
;; now we search within historical data
|
|
49
|
+
(d/q query (d/history @conn));; => #{["Alice" 30] ["Alice" 25] ["Bob" 30]}
|
|
50
|
+
|
|
51
|
+
;; let's find the dates for each attribute additions.
|
|
52
|
+
;; :db/txInstant is an attribute of the meta entity added to each transaction
|
|
53
|
+
;; and can be treated just as any other data
|
|
54
|
+
(d/q '[:find ?a ?v ?t ?op
|
|
55
|
+
:in $ [?a ...]
|
|
56
|
+
:where
|
|
57
|
+
[?e ?a ?v ?tx ?op]
|
|
58
|
+
[?tx :db/txInstant ?t]]
|
|
59
|
+
(d/history @conn )
|
|
60
|
+
[:name :age]);; => #{[:age 25 #inst "2021-11-04T13:46:54.972-00:00" false]
|
|
61
|
+
;; [:age 25 #inst "2021-11-04T13:46:38.223-00:00" true]
|
|
62
|
+
;; [:name "Alice" #inst "2021-11-04T13:46:38.223-00:00" true]
|
|
63
|
+
;; [:age 30 #inst "2021-11-04T13:46:38.223-00:00" true]
|
|
64
|
+
;; [:name "Bob" #inst "2021-11-04T13:46:38.223-00:00" true]
|
|
65
|
+
;; [:age 30 #inst "2021-11-04T13:46:54.972-00:00" true]}
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
;; next let's get the current data of a specific time
|
|
69
|
+
(d/q query (d/as-of @conn first-date));; => #{["Alice" 25] ["Bob" 30]}
|
|
70
|
+
|
|
71
|
+
;; pull is also supported
|
|
72
|
+
(d/pull (d/as-of @conn first-date) '[*] [:name "Alice"]);; => {:db/id 3, :age 25, :name "Alice"}
|
|
73
|
+
|
|
74
|
+
;; now we want to know any additions after a specific time
|
|
75
|
+
(d/q query (d/since @conn first-date))
|
|
76
|
+
;; => {}, because :name was transacted before the first date
|
|
77
|
+
|
|
78
|
+
;; let's build a query where we use the latest db to find the name and the since db to find out who's age changed
|
|
79
|
+
(d/q '[:find ?n ?a
|
|
80
|
+
:in $ $since
|
|
81
|
+
:where
|
|
82
|
+
[$ ?e :name ?n]
|
|
83
|
+
[$since ?e :age ?a]]
|
|
84
|
+
@conn
|
|
85
|
+
(d/since @conn first-date));; => #{["Alice" 30]}
|
|
86
|
+
|
|
87
|
+
;; let's retract Bob from the current view
|
|
88
|
+
(d/transact conn [[:db/retractEntity [:name "Bob"]]])
|
|
89
|
+
|
|
90
|
+
;; Only Alice remains
|
|
91
|
+
(d/q query @conn);; => #{["Alice" 30]}
|
|
92
|
+
|
|
93
|
+
;; Let's have a look at the history, Bob should be there
|
|
94
|
+
(d/q query (d/history @conn));; => #{["Alice" 30] ["Alice" 25] ["Bob" 30]}
|
|
95
|
+
|
|
96
|
+
;; now we can find when Bob was added and when he was removed
|
|
97
|
+
(d/q '[:find ?d ?op
|
|
98
|
+
:in $ ?e
|
|
99
|
+
:where
|
|
100
|
+
[?e _ _ ?t ?op]
|
|
101
|
+
[?t :db/txInstant ?d]]
|
|
102
|
+
(d/history @conn)
|
|
103
|
+
[:name "Bob"])
|
|
104
|
+
;; => #{[#inst "2021-11-04T13:49:35.353-00:00" false]
|
|
105
|
+
;; [#inst "2021-11-04T13:46:38.223-00:00" true]}
|
|
106
|
+
|
|
107
|
+
;; let's see who else was added with Bob
|
|
108
|
+
(d/q '[:find ?n
|
|
109
|
+
:in $ ?e
|
|
110
|
+
:where
|
|
111
|
+
[?e _ _ ?t true]
|
|
112
|
+
[?e2 :name ?n]] (d/history @conn) [:name "Bob"]);; => #{["Alice"] ["Bob"]}
|
|
113
|
+
|
|
114
|
+
;; let's find the retracted entity ID, its attribute, value, and the date of the changes
|
|
115
|
+
(d/q '[:find ?e ?a ?v ?tx
|
|
116
|
+
:where
|
|
117
|
+
[?e ?a ?v ?r false]
|
|
118
|
+
[?r :db/txInstant ?tx]]
|
|
119
|
+
(d/history @conn))
|
|
120
|
+
;; => #{[4 :age 30 #inst "2021-11-04T13:49:35.353-00:00"]
|
|
121
|
+
;; [3 :age 25 #inst "2021-11-04T13:46:54.972-00:00"]
|
|
122
|
+
;; [4 :name "Bob" #inst "2021-11-04T13:49:35.353-00:00"]}
|
|
123
|
+
|
|
124
|
+
;; you can use db fns to compare dates within datalog: `before?` and `after?`.
|
|
125
|
+
;; let's find all transactions after the first date:
|
|
126
|
+
(d/q '[:find ?e ?a ?v
|
|
127
|
+
:in $ ?fd
|
|
128
|
+
:where
|
|
129
|
+
[?e ?a ?v ?t]
|
|
130
|
+
[?t :db/txInstant ?tx]
|
|
131
|
+
[(after? ?tx ?fd)]]
|
|
132
|
+
@conn
|
|
133
|
+
first-date)
|
|
134
|
+
;; => #{[536870916 :db/txInstant #inst "2021-11-04T13:49:35.353-00:00"]
|
|
135
|
+
;; [3 :age 30]
|
|
136
|
+
;; [536870915 :db/txInstant #inst "2021-11-04T13:46:54.972-00:00"]}
|
|
137
|
+
|
|
138
|
+
;; for convenience you may also use the `<`, `>`, `<=`, `>=` functions
|
|
139
|
+
(d/q '[:find ?e ?a ?v
|
|
140
|
+
:in $ ?fd
|
|
141
|
+
:where
|
|
142
|
+
[?e ?a ?v ?t]
|
|
143
|
+
[?t :db/txInstant ?tx]
|
|
144
|
+
[(> ?tx ?fd)]]
|
|
145
|
+
@conn
|
|
146
|
+
first-date)
|
|
147
|
+
;; => #{[536870916 :db/txInstant #inst "2021-11-04T13:49:35.353-00:00"]
|
|
148
|
+
;; [3 :age 30]
|
|
149
|
+
;; [536870915 :db/txInstant #inst "2021-11-04T13:46:54.972-00:00"]}
|
|
150
|
+
|
|
151
|
+
;; since retraction only removes data from the current view of the data, you may use `purge` to completely remove data
|
|
152
|
+
(d/transact conn [[:db/purge [:name "Alice"] :age 30]])
|
|
153
|
+
|
|
154
|
+
;; Alice's age 30 is not there anymore
|
|
155
|
+
(d/q query (d/history @conn));; => #{["Alice" 25] ["Bob" 30]}
|
|
156
|
+
|
|
157
|
+
;; let's remove Alice's entity completely from our database
|
|
158
|
+
(d/transact conn [[:db.purge/entity [:name "Alice"]]])
|
|
159
|
+
|
|
160
|
+
;; Only Bob remains in the history
|
|
161
|
+
(d/q query (d/history @conn));; => #{["Bob" 30]}
|
|
162
|
+
|
|
163
|
+
;; let's add some more data
|
|
164
|
+
(d/transact conn [{:name "Charlie" :age 45}])
|
|
165
|
+
|
|
166
|
+
(d/q query @conn);; => #{["Charlie" 45]}
|
|
167
|
+
|
|
168
|
+
;; store the current date
|
|
169
|
+
(def before-date (java.util.Date.))
|
|
170
|
+
|
|
171
|
+
;; update Charlie's age
|
|
172
|
+
(d/transact conn [{:db/id [:name "Charlie"] :age 50}])
|
|
173
|
+
|
|
174
|
+
(d/transact conn [{:db/id [:name "Charlie"] :age 55}])
|
|
175
|
+
|
|
176
|
+
(d/q query @conn);; => #{["Charlie" 55]}
|
|
177
|
+
(d/q query (d/history @conn));; => #{["Bob" 30] ["Charlie" 50] ["Charlie" 45] ["Charlie" 55]}
|
|
178
|
+
|
|
179
|
+
;; now let's purge data from temporal index that was added to the temporal index before a specific date
|
|
180
|
+
(d/transact conn [[:db.history.purge/before before-date]])
|
|
181
|
+
|
|
182
|
+
;; Charlie's current age should remain since it is not in the temporal index
|
|
183
|
+
(d/q query @conn);; => #{["Charlie" 55]}
|
|
184
|
+
;; Only the latest data after before-date should be in the history, Charlie with age 45 because it was removed after before-date
|
|
185
|
+
(d/q query (d/history @conn));; => #{["Charlie" 50] ["Charlie" 45] ["Charlie" 55]}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
eclipse.preferences.version=1
|
|
2
|
+
org.eclipse.jdt.core.compiler.codegen.targetPlatform=17
|
|
3
|
+
org.eclipse.jdt.core.compiler.compliance=17
|
|
4
|
+
org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled
|
|
5
|
+
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
|
|
6
|
+
org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore
|
|
7
|
+
org.eclipse.jdt.core.compiler.processAnnotations=disabled
|
|
8
|
+
org.eclipse.jdt.core.compiler.release=disabled
|
|
9
|
+
org.eclipse.jdt.core.compiler.source=17
|
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
# Datahike Java Examples
|
|
2
|
+
|
|
3
|
+
This directory contains example applications demonstrating the Datahike Java API.
|
|
4
|
+
|
|
5
|
+
## Prerequisites
|
|
6
|
+
|
|
7
|
+
- Java 17 or higher (required for text blocks and modern Java features)
|
|
8
|
+
- Maven 3.6+
|
|
9
|
+
|
|
10
|
+
## Running the Examples
|
|
11
|
+
|
|
12
|
+
### Option 1: Maven Exec (Recommended)
|
|
13
|
+
|
|
14
|
+
Run examples directly with Maven:
|
|
15
|
+
|
|
16
|
+
```bash
|
|
17
|
+
# Quick Start
|
|
18
|
+
mvn compile exec:java -Dexec.mainClass="examples.QuickStart"
|
|
19
|
+
|
|
20
|
+
# Schema Example
|
|
21
|
+
mvn compile exec:java -Dexec.mainClass="examples.SchemaExample"
|
|
22
|
+
|
|
23
|
+
# Time Travel Example
|
|
24
|
+
mvn compile exec:java -Dexec.mainClass="examples.TimeTravelExample"
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
### Option 2: Build and Run JAR
|
|
28
|
+
|
|
29
|
+
Build the project and run the JAR:
|
|
30
|
+
|
|
31
|
+
```bash
|
|
32
|
+
# Build
|
|
33
|
+
mvn clean package
|
|
34
|
+
|
|
35
|
+
# Run (using QuickStart as main class from pom.xml)
|
|
36
|
+
java -jar target/datahike-java-examples-1.0.0-SNAPSHOT.jar
|
|
37
|
+
|
|
38
|
+
# Or run specific examples
|
|
39
|
+
java -cp target/datahike-java-examples-1.0.0-SNAPSHOT.jar examples.SchemaExample
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
### Option 3: IDE
|
|
43
|
+
|
|
44
|
+
Import the Maven project into your IDE (IntelliJ IDEA, Eclipse, VSCode) and run the main classes directly.
|
|
45
|
+
|
|
46
|
+
## Examples
|
|
47
|
+
|
|
48
|
+
### QuickStart.java
|
|
49
|
+
|
|
50
|
+
Basic introduction to Datahike:
|
|
51
|
+
- Database configuration with builder pattern
|
|
52
|
+
- Creating and connecting to databases
|
|
53
|
+
- Transacting data with Java Maps
|
|
54
|
+
- Querying with Datalog
|
|
55
|
+
- Updates and cleanup
|
|
56
|
+
|
|
57
|
+
**Key concepts:** Builder pattern, basic queries, CRUD operations
|
|
58
|
+
|
|
59
|
+
### SchemaExample.java
|
|
60
|
+
|
|
61
|
+
Schema definition and validation:
|
|
62
|
+
- Defining attributes with type constraints
|
|
63
|
+
- Unique constraints (identity vs value)
|
|
64
|
+
- Reference types for relationships
|
|
65
|
+
- Cardinality (one vs many)
|
|
66
|
+
- Using Keywords constants
|
|
67
|
+
|
|
68
|
+
**Key concepts:** Schema definition, relationships, data validation
|
|
69
|
+
|
|
70
|
+
### TimeTravelExample.java
|
|
71
|
+
|
|
72
|
+
Historical queries and time travel:
|
|
73
|
+
- Querying database state at specific points in time
|
|
74
|
+
- Using `asOf` for point-in-time queries
|
|
75
|
+
- Using `since` for change tracking
|
|
76
|
+
- Querying full history
|
|
77
|
+
- Transaction metadata
|
|
78
|
+
|
|
79
|
+
**Key concepts:** Immutability, history, audit trails
|
|
80
|
+
|
|
81
|
+
## Project Structure
|
|
82
|
+
|
|
83
|
+
```
|
|
84
|
+
examples/java/
|
|
85
|
+
├── pom.xml # Maven configuration
|
|
86
|
+
├── README.md # This file
|
|
87
|
+
└── src/main/java/examples/
|
|
88
|
+
├── QuickStart.java # Basic usage
|
|
89
|
+
├── SchemaExample.java # Schema definition
|
|
90
|
+
└── TimeTravelExample.java # Time travel queries
|
|
91
|
+
```
|
|
92
|
+
|
|
93
|
+
## Using Datahike in Your Project
|
|
94
|
+
|
|
95
|
+
To use Datahike in your own Maven project, add to your `pom.xml`:
|
|
96
|
+
|
|
97
|
+
```xml
|
|
98
|
+
<repositories>
|
|
99
|
+
<repository>
|
|
100
|
+
<id>clojars</id>
|
|
101
|
+
<url>https://repo.clojars.org/</url>
|
|
102
|
+
</repository>
|
|
103
|
+
</repositories>
|
|
104
|
+
|
|
105
|
+
<dependencies>
|
|
106
|
+
<dependency>
|
|
107
|
+
<groupId>io.replikativ</groupId>
|
|
108
|
+
<artifactId>datahike</artifactId>
|
|
109
|
+
<version>LATEST_VERSION</version>
|
|
110
|
+
</dependency>
|
|
111
|
+
</dependencies>
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
For Gradle:
|
|
115
|
+
|
|
116
|
+
```gradle
|
|
117
|
+
repositories {
|
|
118
|
+
maven { url "https://repo.clojars.org/" }
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
dependencies {
|
|
122
|
+
implementation 'io.replikativ:datahike:LATEST_VERSION'
|
|
123
|
+
}
|
|
124
|
+
```
|
|
125
|
+
|
|
126
|
+
**Note:** Replace `LATEST_VERSION` with the latest published version from [Clojars](https://clojars.org/io.replikativ/datahike).
|
|
127
|
+
|
|
128
|
+
## Key Java API Classes
|
|
129
|
+
|
|
130
|
+
- **`Datahike`** - Main API with all database operations
|
|
131
|
+
- **`Database`** - Fluent builder for configuration
|
|
132
|
+
- **`Keywords`** - Pre-defined constants for schema
|
|
133
|
+
- **`SchemaFlexibility`** - Enum for schema modes
|
|
134
|
+
- **`Util`** - Low-level utilities (map, vec, kwd)
|
|
135
|
+
- **`EDN`** - EDN data type constructors
|
|
136
|
+
|
|
137
|
+
## Further Reading
|
|
138
|
+
|
|
139
|
+
- [Java API Documentation](../../doc/java-api.md) - Comprehensive API guide
|
|
140
|
+
- [Main README](../../README.md) - Project overview
|
|
141
|
+
- [Schema Guide](../../doc/schema.md) - Detailed schema documentation
|
|
142
|
+
- [Storage Backends](../../doc/storage-backends.md) - Backend configuration
|
|
143
|
+
- [Datalog Tutorial](https://docs.datomic.com/on-prem/query.html) - Query language
|
|
144
|
+
|
|
145
|
+
## Troubleshooting
|
|
146
|
+
|
|
147
|
+
**Build fails with "Could not find artifact"**
|
|
148
|
+
- Ensure Clojars repository is configured
|
|
149
|
+
- Check internet connection
|
|
150
|
+
- Try `mvn clean install -U` to force update
|
|
151
|
+
|
|
152
|
+
**Runtime errors about Clojure**
|
|
153
|
+
- Datahike includes Clojure as a dependency
|
|
154
|
+
- Check for dependency conflicts with `mvn dependency:tree`
|
|
155
|
+
|
|
156
|
+
**ClassCastException in query results**
|
|
157
|
+
- Query results are Clojure collections (Set, List, Map)
|
|
158
|
+
- Cast appropriately: `(Set<?>) Datahike.q(...)`
|
|
159
|
+
|
|
160
|
+
## License
|
|
161
|
+
|
|
162
|
+
Eclipse Public License 1.0 (EPL-1.0)
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
<?xml version="1.0" encoding="UTF-8"?>
|
|
2
|
+
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
|
3
|
+
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
|
4
|
+
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
|
5
|
+
<modelVersion>4.0.0</modelVersion>
|
|
6
|
+
|
|
7
|
+
<groupId>org.replikativ.datahike</groupId>
|
|
8
|
+
<artifactId>datahike-java-examples</artifactId>
|
|
9
|
+
<version>1.0.0-SNAPSHOT</version>
|
|
10
|
+
<packaging>jar</packaging>
|
|
11
|
+
|
|
12
|
+
<name>Datahike Java Examples</name>
|
|
13
|
+
<description>Example applications demonstrating Datahike Java API usage</description>
|
|
14
|
+
|
|
15
|
+
<properties>
|
|
16
|
+
<maven.compiler.source>17</maven.compiler.source>
|
|
17
|
+
<maven.compiler.target>17</maven.compiler.target>
|
|
18
|
+
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
|
19
|
+
<datahike.version>0.7.1663</datahike.version>
|
|
20
|
+
</properties>
|
|
21
|
+
|
|
22
|
+
<repositories>
|
|
23
|
+
<repository>
|
|
24
|
+
<id>clojars</id>
|
|
25
|
+
<name>Clojars</name>
|
|
26
|
+
<url>https://repo.clojars.org/</url>
|
|
27
|
+
</repository>
|
|
28
|
+
</repositories>
|
|
29
|
+
|
|
30
|
+
<dependencies>
|
|
31
|
+
<!-- Datahike -->
|
|
32
|
+
<dependency>
|
|
33
|
+
<groupId>io.replikativ</groupId>
|
|
34
|
+
<artifactId>datahike</artifactId>
|
|
35
|
+
<version>${datahike.version}</version>
|
|
36
|
+
</dependency>
|
|
37
|
+
</dependencies>
|
|
38
|
+
|
|
39
|
+
<build>
|
|
40
|
+
<plugins>
|
|
41
|
+
<plugin>
|
|
42
|
+
<groupId>org.apache.maven.plugins</groupId>
|
|
43
|
+
<artifactId>maven-compiler-plugin</artifactId>
|
|
44
|
+
<version>3.11.0</version>
|
|
45
|
+
</plugin>
|
|
46
|
+
|
|
47
|
+
<!-- Create executable JAR -->
|
|
48
|
+
<plugin>
|
|
49
|
+
<groupId>org.apache.maven.plugins</groupId>
|
|
50
|
+
<artifactId>maven-jar-plugin</artifactId>
|
|
51
|
+
<version>3.3.0</version>
|
|
52
|
+
<configuration>
|
|
53
|
+
<archive>
|
|
54
|
+
<manifest>
|
|
55
|
+
<mainClass>examples.QuickStart</mainClass>
|
|
56
|
+
</manifest>
|
|
57
|
+
</archive>
|
|
58
|
+
</configuration>
|
|
59
|
+
</plugin>
|
|
60
|
+
</plugins>
|
|
61
|
+
</build>
|
|
62
|
+
</project>
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
package examples;
|
|
2
|
+
|
|
3
|
+
import datahike.java.Datahike;
|
|
4
|
+
import datahike.java.Database;
|
|
5
|
+
import datahike.java.SchemaFlexibility;
|
|
6
|
+
|
|
7
|
+
import java.util.*;
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* Quick start example demonstrating basic Datahike usage.
|
|
11
|
+
*
|
|
12
|
+
* This example shows:
|
|
13
|
+
* - Database configuration with builder pattern
|
|
14
|
+
* - Creating and connecting to a database
|
|
15
|
+
* - Transacting data
|
|
16
|
+
* - Querying with Datalog
|
|
17
|
+
* - Cleanup
|
|
18
|
+
*/
|
|
19
|
+
public class QuickStart {
|
|
20
|
+
public static void main(String[] args) {
|
|
21
|
+
System.out.println("=== Datahike Java Quick Start ===\n");
|
|
22
|
+
|
|
23
|
+
// 1. Configure database with builder pattern
|
|
24
|
+
System.out.println("1. Configuring database...");
|
|
25
|
+
Map<String, Object> config = Database.memory(UUID.randomUUID())
|
|
26
|
+
.schemaFlexibility(SchemaFlexibility.READ)
|
|
27
|
+
.keepHistory(true)
|
|
28
|
+
.name("quickstart-example")
|
|
29
|
+
.build();
|
|
30
|
+
|
|
31
|
+
// 2. Create and connect to database
|
|
32
|
+
System.out.println("2. Creating database...");
|
|
33
|
+
Datahike.createDatabase(config);
|
|
34
|
+
|
|
35
|
+
System.out.println("3. Connecting to database...");
|
|
36
|
+
Object conn = Datahike.connect(config);
|
|
37
|
+
|
|
38
|
+
// 3. Transact data using Java Maps
|
|
39
|
+
System.out.println("4. Adding data...");
|
|
40
|
+
Datahike.transact(conn, List.of(
|
|
41
|
+
Map.of("name", "Alice", "age", 30, "city", "Berlin"),
|
|
42
|
+
Map.of("name", "Bob", "age", 25, "city", "London"),
|
|
43
|
+
Map.of("name", "Charlie", "age", 35, "city", "Berlin")
|
|
44
|
+
));
|
|
45
|
+
|
|
46
|
+
// 4. Query data with Datalog
|
|
47
|
+
System.out.println("5. Querying data...");
|
|
48
|
+
|
|
49
|
+
// Find all people
|
|
50
|
+
Set<?> allPeople = (Set<?>) Datahike.q(
|
|
51
|
+
"[:find ?name ?age :where [?e :name ?name] [?e :age ?age]]",
|
|
52
|
+
Datahike.deref(conn)
|
|
53
|
+
);
|
|
54
|
+
System.out.println("All people: " + allPeople);
|
|
55
|
+
|
|
56
|
+
// Find people in Berlin
|
|
57
|
+
Set<?> berliners = (Set<?>) Datahike.q(
|
|
58
|
+
"""
|
|
59
|
+
[:find ?name ?age
|
|
60
|
+
:where
|
|
61
|
+
[?e :name ?name]
|
|
62
|
+
[?e :age ?age]
|
|
63
|
+
[?e :city "Berlin"]]
|
|
64
|
+
""",
|
|
65
|
+
Datahike.deref(conn)
|
|
66
|
+
);
|
|
67
|
+
System.out.println("People in Berlin: " + berliners);
|
|
68
|
+
|
|
69
|
+
// Find people over 30
|
|
70
|
+
Set<?> over30 = (Set<?>) Datahike.q(
|
|
71
|
+
"""
|
|
72
|
+
[:find ?name
|
|
73
|
+
:where
|
|
74
|
+
[?e :name ?name]
|
|
75
|
+
[?e :age ?age]
|
|
76
|
+
[(>= ?age 30)]]
|
|
77
|
+
""",
|
|
78
|
+
Datahike.deref(conn)
|
|
79
|
+
);
|
|
80
|
+
System.out.println("People over 30: " + over30);
|
|
81
|
+
|
|
82
|
+
// 5. Update data
|
|
83
|
+
System.out.println("\n6. Updating data...");
|
|
84
|
+
// Find Alice's entity ID first
|
|
85
|
+
Set<?> aliceResult = (Set<?>) Datahike.q(
|
|
86
|
+
"[:find ?e :where [?e :name \"Alice\"]]",
|
|
87
|
+
Datahike.deref(conn)
|
|
88
|
+
);
|
|
89
|
+
|
|
90
|
+
if (!aliceResult.isEmpty()) {
|
|
91
|
+
Object aliceId = ((List<?>) aliceResult.iterator().next()).get(0);
|
|
92
|
+
|
|
93
|
+
// Update Alice's age
|
|
94
|
+
Datahike.transact(conn, List.of(
|
|
95
|
+
Map.of(":db/id", aliceId, "age", 31)
|
|
96
|
+
));
|
|
97
|
+
|
|
98
|
+
System.out.println("Updated Alice's age to 31");
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
// Query again to see the update
|
|
102
|
+
Set<?> updated = (Set<?>) Datahike.q(
|
|
103
|
+
"[:find ?name ?age :where [?e :name \"Alice\"] [?e :age ?age] [?e :name ?name]]",
|
|
104
|
+
Datahike.deref(conn)
|
|
105
|
+
);
|
|
106
|
+
System.out.println("Alice after update: " + updated);
|
|
107
|
+
|
|
108
|
+
// 6. Cleanup
|
|
109
|
+
System.out.println("\n7. Cleaning up...");
|
|
110
|
+
Datahike.deleteDatabase(config);
|
|
111
|
+
|
|
112
|
+
System.out.println("\n=== Example completed successfully! ===");
|
|
113
|
+
System.exit(0);
|
|
114
|
+
}
|
|
115
|
+
}
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
package examples;
|
|
2
|
+
|
|
3
|
+
import datahike.java.Datahike;
|
|
4
|
+
import datahike.java.Database;
|
|
5
|
+
import datahike.java.SchemaFlexibility;
|
|
6
|
+
|
|
7
|
+
import java.util.*;
|
|
8
|
+
|
|
9
|
+
import static datahike.java.Keywords.*;
|
|
10
|
+
import static datahike.java.Util.*;
|
|
11
|
+
|
|
12
|
+
/**
|
|
13
|
+
* Example demonstrating schema definition and validation.
|
|
14
|
+
*
|
|
15
|
+
* Shows:
|
|
16
|
+
* - Defining schema attributes with Keywords constants
|
|
17
|
+
* - Using unique constraints
|
|
18
|
+
* - Reference types for relationships
|
|
19
|
+
* - Cardinality (one vs many)
|
|
20
|
+
*/
|
|
21
|
+
public class SchemaExample {
|
|
22
|
+
public static void main(String[] args) {
|
|
23
|
+
System.out.println("=== Schema Definition Example ===\n");
|
|
24
|
+
|
|
25
|
+
// Define schema using Keywords constants
|
|
26
|
+
System.out.println("1. Defining schema...");
|
|
27
|
+
Object schema = vec(
|
|
28
|
+
// Person name - unique identity
|
|
29
|
+
map(
|
|
30
|
+
DB_IDENT, kwd(":person/name"),
|
|
31
|
+
DB_VALUE_TYPE, STRING,
|
|
32
|
+
DB_CARDINALITY, ONE,
|
|
33
|
+
DB_UNIQUE, UNIQUE_IDENTITY,
|
|
34
|
+
DB_DOC, "Person's full name (unique)"
|
|
35
|
+
),
|
|
36
|
+
// Person email - unique value
|
|
37
|
+
map(
|
|
38
|
+
DB_IDENT, kwd(":person/email"),
|
|
39
|
+
DB_VALUE_TYPE, STRING,
|
|
40
|
+
DB_CARDINALITY, ONE,
|
|
41
|
+
DB_UNIQUE, UNIQUE_VALUE,
|
|
42
|
+
DB_DOC, "Person's email address (unique)"
|
|
43
|
+
),
|
|
44
|
+
// Person age
|
|
45
|
+
map(
|
|
46
|
+
DB_IDENT, kwd(":person/age"),
|
|
47
|
+
DB_VALUE_TYPE, LONG,
|
|
48
|
+
DB_CARDINALITY, ONE,
|
|
49
|
+
DB_DOC, "Person's age in years"
|
|
50
|
+
),
|
|
51
|
+
// Person friends - many references
|
|
52
|
+
map(
|
|
53
|
+
DB_IDENT, kwd(":person/friends"),
|
|
54
|
+
DB_VALUE_TYPE, REF,
|
|
55
|
+
DB_CARDINALITY, MANY,
|
|
56
|
+
DB_DOC, "Person's friends (entity references)"
|
|
57
|
+
),
|
|
58
|
+
// Person skills - many strings
|
|
59
|
+
map(
|
|
60
|
+
DB_IDENT, kwd(":person/skills"),
|
|
61
|
+
DB_VALUE_TYPE, STRING,
|
|
62
|
+
DB_CARDINALITY, MANY,
|
|
63
|
+
DB_DOC, "Person's skills"
|
|
64
|
+
)
|
|
65
|
+
);
|
|
66
|
+
|
|
67
|
+
// Create database with initial schema
|
|
68
|
+
Map<String, Object> config = Database.memory(UUID.randomUUID())
|
|
69
|
+
.initialTx(schema)
|
|
70
|
+
.build();
|
|
71
|
+
|
|
72
|
+
Datahike.createDatabase(config);
|
|
73
|
+
Object conn = Datahike.connect(config);
|
|
74
|
+
|
|
75
|
+
// 2. Add data respecting the schema
|
|
76
|
+
System.out.println("2. Adding people with relationships...");
|
|
77
|
+
|
|
78
|
+
// Using modern Java API - strings auto-convert to keywords
|
|
79
|
+
Datahike.transact(conn, List.of(
|
|
80
|
+
// Alice
|
|
81
|
+
Map.of(
|
|
82
|
+
"person/name", "Alice",
|
|
83
|
+
"person/email", "alice@example.com",
|
|
84
|
+
"person/age", 30L,
|
|
85
|
+
"person/skills", List.of("Java", "Clojure", "Datalog")
|
|
86
|
+
),
|
|
87
|
+
// Bob
|
|
88
|
+
Map.of(
|
|
89
|
+
"person/name", "Bob",
|
|
90
|
+
"person/email", "bob@example.com",
|
|
91
|
+
"person/age", 25L,
|
|
92
|
+
"person/skills", List.of("Python", "SQL")
|
|
93
|
+
)
|
|
94
|
+
));
|
|
95
|
+
|
|
96
|
+
// 3. Use unique identity to reference and update
|
|
97
|
+
System.out.println("3. Using unique identity for upsert...");
|
|
98
|
+
|
|
99
|
+
Datahike.transact(conn, List.of(
|
|
100
|
+
Map.of(
|
|
101
|
+
"person/name", "Alice", // Upsert by unique identity
|
|
102
|
+
"person/age", 31L // Update age
|
|
103
|
+
)
|
|
104
|
+
));
|
|
105
|
+
|
|
106
|
+
// 4. Query relationships
|
|
107
|
+
System.out.println("4. Querying people and skills...");
|
|
108
|
+
|
|
109
|
+
Set<?> peopleWithSkills = (Set<?>) Datahike.q(
|
|
110
|
+
"""
|
|
111
|
+
[:find ?name ?skill
|
|
112
|
+
:where
|
|
113
|
+
[?e :person/name ?name]
|
|
114
|
+
[?e :person/skills ?skill]]
|
|
115
|
+
""",
|
|
116
|
+
Datahike.deref(conn)
|
|
117
|
+
);
|
|
118
|
+
|
|
119
|
+
System.out.println("People and their skills:");
|
|
120
|
+
peopleWithSkills.forEach(System.out::println);
|
|
121
|
+
|
|
122
|
+
// 5. Count aggregates
|
|
123
|
+
Object skillCounts = Datahike.q(
|
|
124
|
+
"""
|
|
125
|
+
[:find ?name (count ?skill)
|
|
126
|
+
:where
|
|
127
|
+
[?e :person/name ?name]
|
|
128
|
+
[?e :person/skills ?skill]]
|
|
129
|
+
""",
|
|
130
|
+
Datahike.deref(conn)
|
|
131
|
+
);
|
|
132
|
+
|
|
133
|
+
System.out.println("\nSkill counts per person:");
|
|
134
|
+
System.out.println(skillCounts);
|
|
135
|
+
|
|
136
|
+
// 6. Verify schema
|
|
137
|
+
System.out.println("\n5. Retrieving schema...");
|
|
138
|
+
Map<?, ?> dbSchema = (Map<?, ?>) Datahike.schema(Datahike.deref(conn));
|
|
139
|
+
|
|
140
|
+
System.out.println("Schema has " + dbSchema.size() + " attributes defined");
|
|
141
|
+
|
|
142
|
+
// Cleanup
|
|
143
|
+
Datahike.deleteDatabase(config);
|
|
144
|
+
|
|
145
|
+
System.out.println("\n=== Example completed successfully! ===");
|
|
146
|
+
System.exit(0);
|
|
147
|
+
}
|
|
148
|
+
}
|