datahike-browser-tests 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.circleci/config.yml +405 -0
- package/.circleci/scripts/gen_ci.clj +194 -0
- package/.cirrus.yml +60 -0
- package/.clj-kondo/babashka/sci/config.edn +1 -0
- package/.clj-kondo/babashka/sci/sci/core.clj +9 -0
- package/.clj-kondo/config.edn +95 -0
- package/.dir-locals.el +2 -0
- package/.github/FUNDING.yml +3 -0
- package/.github/ISSUE_TEMPLATE/1-bug-report.yml +68 -0
- package/.github/ISSUE_TEMPLATE/2-feature-request.yml +28 -0
- package/.github/ISSUE_TEMPLATE/config.yml +6 -0
- package/.github/pull_request_template.md +24 -0
- package/.github/workflows/native-image.yml +84 -0
- package/LICENSE +203 -0
- package/README.md +273 -0
- package/bb/deps.edn +9 -0
- package/bb/resources/github-fingerprints +3 -0
- package/bb/resources/native-image-tests/run-bb-pod-tests.clj +162 -0
- package/bb/resources/native-image-tests/run-libdatahike-tests +12 -0
- package/bb/resources/native-image-tests/run-native-image-tests +74 -0
- package/bb/resources/native-image-tests/run-python-tests +22 -0
- package/bb/resources/native-image-tests/testconfig.attr-refs.edn +6 -0
- package/bb/resources/native-image-tests/testconfig.edn +5 -0
- package/bb/resources/template/.settings/org.eclipse.jdt.apt.core.prefs +2 -0
- package/bb/resources/template/.settings/org.eclipse.jdt.core.prefs +9 -0
- package/bb/resources/template/.settings/org.eclipse.m2e.core.prefs +4 -0
- package/bb/resources/template/pom.xml +22 -0
- package/bb/src/tools/build.clj +132 -0
- package/bb/src/tools/clj_kondo.clj +32 -0
- package/bb/src/tools/deploy.clj +26 -0
- package/bb/src/tools/examples.clj +19 -0
- package/bb/src/tools/npm.clj +100 -0
- package/bb/src/tools/python.clj +14 -0
- package/bb/src/tools/release.clj +94 -0
- package/bb/src/tools/test.clj +148 -0
- package/bb/src/tools/version.clj +47 -0
- package/bb.edn +269 -0
- package/benchmark/src/benchmark/cli.clj +195 -0
- package/benchmark/src/benchmark/compare.clj +157 -0
- package/benchmark/src/benchmark/config.clj +316 -0
- package/benchmark/src/benchmark/measure.clj +187 -0
- package/benchmark/src/benchmark/store.clj +190 -0
- package/benchmark/test/benchmark/measure_test.clj +156 -0
- package/build.clj +30 -0
- package/config.edn +49 -0
- package/deps.edn +138 -0
- package/dev/sandbox.clj +82 -0
- package/dev/sandbox.cljs +127 -0
- package/dev/sandbox_benchmarks.clj +27 -0
- package/dev/sandbox_client.clj +87 -0
- package/dev/sandbox_transact_bench.clj +109 -0
- package/dev/user.clj +79 -0
- package/doc/README.md +96 -0
- package/doc/adl/README.md +6 -0
- package/doc/adl/adr-000-adr.org +28 -0
- package/doc/adl/adr-001-attribute-references.org +15 -0
- package/doc/adl/adr-002-build-tooling.org +54 -0
- package/doc/adl/adr-003-db-meta-data.md +52 -0
- package/doc/adl/adr-004-github-flow.md +40 -0
- package/doc/adl/adr-XYZ-template.md +30 -0
- package/doc/adl/index.org +3 -0
- package/doc/assets/datahike-logo.svg +3 -0
- package/doc/assets/datahiking-invoice.org +85 -0
- package/doc/assets/hhtree2.png +0 -0
- package/doc/assets/network_topology.svg +624 -0
- package/doc/assets/perf.png +0 -0
- package/doc/assets/schema_mindmap.mm +132 -0
- package/doc/assets/schema_mindmap.svg +970 -0
- package/doc/assets/temporal_index.mm +74 -0
- package/doc/backend-development.md +78 -0
- package/doc/bb-pod.md +89 -0
- package/doc/benchmarking.md +360 -0
- package/doc/bindings/edn-conversion.md +383 -0
- package/doc/cli.md +162 -0
- package/doc/cljdoc.edn +27 -0
- package/doc/cljs-support.md +133 -0
- package/doc/config.md +406 -0
- package/doc/contributing.md +114 -0
- package/doc/datalog-vs-sql.md +210 -0
- package/doc/datomic_differences.md +109 -0
- package/doc/development/pull-api-ns.md +186 -0
- package/doc/development/pull-frame-state-diagram.jpg +0 -0
- package/doc/distributed.md +566 -0
- package/doc/entity_spec.md +92 -0
- package/doc/gc.md +273 -0
- package/doc/java-api.md +808 -0
- package/doc/javascript-api.md +421 -0
- package/doc/libdatahike.md +86 -0
- package/doc/logging_and_error_handling.md +43 -0
- package/doc/norms.md +66 -0
- package/doc/schema-migration.md +85 -0
- package/doc/schema.md +287 -0
- package/doc/storage-backends.md +363 -0
- package/doc/store-id-refactoring.md +596 -0
- package/doc/time_variance.md +325 -0
- package/doc/unstructured.md +167 -0
- package/doc/versioning.md +261 -0
- package/examples/basic/README.md +19 -0
- package/examples/basic/deps.edn +6 -0
- package/examples/basic/docker-compose.yml +13 -0
- package/examples/basic/src/examples/core.clj +60 -0
- package/examples/basic/src/examples/schema.clj +155 -0
- package/examples/basic/src/examples/store.clj +60 -0
- package/examples/basic/src/examples/time_travel.clj +185 -0
- package/examples/java/.settings/org.eclipse.core.resources.prefs +3 -0
- package/examples/java/.settings/org.eclipse.jdt.apt.core.prefs +2 -0
- package/examples/java/.settings/org.eclipse.jdt.core.prefs +9 -0
- package/examples/java/.settings/org.eclipse.m2e.core.prefs +4 -0
- package/examples/java/README.md +162 -0
- package/examples/java/pom.xml +62 -0
- package/examples/java/src/main/java/examples/QuickStart.java +115 -0
- package/examples/java/src/main/java/examples/SchemaExample.java +148 -0
- package/examples/java/src/main/java/examples/TimeTravelExample.java +121 -0
- package/flake.lock +27 -0
- package/flake.nix +27 -0
- package/http-server/datahike/http/middleware.clj +75 -0
- package/http-server/datahike/http/server.clj +269 -0
- package/java/src/datahike/java/Database.java +274 -0
- package/java/src/datahike/java/Datahike.java +281 -0
- package/java/src/datahike/java/DatahikeGeneratedTest.java +349 -0
- package/java/src/datahike/java/DatahikeTest.java +370 -0
- package/java/src/datahike/java/EDN.java +170 -0
- package/java/src/datahike/java/IEntity.java +11 -0
- package/java/src/datahike/java/Keywords.java +161 -0
- package/java/src/datahike/java/SchemaFlexibility.java +52 -0
- package/java/src/datahike/java/Util.java +219 -0
- package/karma.conf.js +19 -0
- package/libdatahike/compile-cpp +7 -0
- package/libdatahike/src/datahike/impl/LibDatahikeBase.java +203 -0
- package/libdatahike/src/datahike/impl/libdatahike.clj +59 -0
- package/libdatahike/src/test_cpp.cpp +61 -0
- package/npm-package/PUBLISHING.md +140 -0
- package/npm-package/README.md +226 -0
- package/npm-package/package.template.json +34 -0
- package/npm-package/test-isomorphic.ts +281 -0
- package/npm-package/test.js +557 -0
- package/npm-package/typescript-test.ts +70 -0
- package/package.json +16 -0
- package/pydatahike/README.md +569 -0
- package/pydatahike/pyproject.toml +91 -0
- package/pydatahike/setup.py +42 -0
- package/pydatahike/src/datahike/__init__.py +134 -0
- package/pydatahike/src/datahike/_native.py +250 -0
- package/pydatahike/src/datahike/_version.py +2 -0
- package/pydatahike/src/datahike/database.py +722 -0
- package/pydatahike/src/datahike/edn.py +311 -0
- package/pydatahike/src/datahike/py.typed +0 -0
- package/pydatahike/tests/conftest.py +17 -0
- package/pydatahike/tests/test_basic.py +170 -0
- package/pydatahike/tests/test_database.py +51 -0
- package/pydatahike/tests/test_edn_conversion.py +299 -0
- package/pydatahike/tests/test_query.py +99 -0
- package/pydatahike/tests/test_schema.py +55 -0
- package/resources/clj-kondo.exports/io.replikativ/datahike/config.edn +5 -0
- package/resources/example_server.edn +4 -0
- package/shadow-cljs.edn +56 -0
- package/src/data_readers.clj +7 -0
- package/src/datahike/api/impl.cljc +176 -0
- package/src/datahike/api/specification.cljc +633 -0
- package/src/datahike/api/types.cljc +261 -0
- package/src/datahike/api.cljc +41 -0
- package/src/datahike/array.cljc +99 -0
- package/src/datahike/cli.clj +166 -0
- package/src/datahike/cljs.cljs +6 -0
- package/src/datahike/codegen/cli.clj +406 -0
- package/src/datahike/codegen/clj_kondo.clj +291 -0
- package/src/datahike/codegen/java.clj +403 -0
- package/src/datahike/codegen/naming.cljc +33 -0
- package/src/datahike/codegen/native.clj +559 -0
- package/src/datahike/codegen/pod.clj +488 -0
- package/src/datahike/codegen/python.clj +838 -0
- package/src/datahike/codegen/report.clj +55 -0
- package/src/datahike/codegen/typescript.clj +262 -0
- package/src/datahike/codegen/validation.clj +145 -0
- package/src/datahike/config.cljc +294 -0
- package/src/datahike/connections.cljc +16 -0
- package/src/datahike/connector.cljc +265 -0
- package/src/datahike/constants.cljc +142 -0
- package/src/datahike/core.cljc +297 -0
- package/src/datahike/datom.cljc +459 -0
- package/src/datahike/db/interface.cljc +119 -0
- package/src/datahike/db/search.cljc +305 -0
- package/src/datahike/db/transaction.cljc +937 -0
- package/src/datahike/db/utils.cljc +338 -0
- package/src/datahike/db.cljc +956 -0
- package/src/datahike/experimental/unstructured.cljc +126 -0
- package/src/datahike/experimental/versioning.cljc +172 -0
- package/src/datahike/externs.js +31 -0
- package/src/datahike/gc.cljc +69 -0
- package/src/datahike/http/client.clj +188 -0
- package/src/datahike/http/writer.clj +79 -0
- package/src/datahike/impl/entity.cljc +218 -0
- package/src/datahike/index/interface.cljc +93 -0
- package/src/datahike/index/persistent_set.cljc +469 -0
- package/src/datahike/index/utils.cljc +44 -0
- package/src/datahike/index.cljc +32 -0
- package/src/datahike/js/api.cljs +172 -0
- package/src/datahike/js/api_macros.clj +22 -0
- package/src/datahike/js.cljs +163 -0
- package/src/datahike/json.cljc +209 -0
- package/src/datahike/lru.cljc +146 -0
- package/src/datahike/migrate.clj +39 -0
- package/src/datahike/norm/norm.clj +245 -0
- package/src/datahike/online_gc.cljc +252 -0
- package/src/datahike/pod.clj +155 -0
- package/src/datahike/pull_api.cljc +325 -0
- package/src/datahike/query.cljc +1945 -0
- package/src/datahike/query_stats.cljc +88 -0
- package/src/datahike/readers.cljc +62 -0
- package/src/datahike/remote.cljc +218 -0
- package/src/datahike/schema.cljc +228 -0
- package/src/datahike/schema_cache.cljc +42 -0
- package/src/datahike/spec.cljc +101 -0
- package/src/datahike/store.cljc +80 -0
- package/src/datahike/tools.cljc +308 -0
- package/src/datahike/transit.cljc +80 -0
- package/src/datahike/writer.cljc +239 -0
- package/src/datahike/writing.cljc +362 -0
- package/src/deps.cljs +1 -0
- package/src-hitchhiker-tree/datahike/index/hitchhiker_tree/insert.cljc +76 -0
- package/src-hitchhiker-tree/datahike/index/hitchhiker_tree/upsert.cljc +128 -0
- package/src-hitchhiker-tree/datahike/index/hitchhiker_tree.cljc +213 -0
- package/test/datahike/backward_compatibility_test/src/backward_test.clj +37 -0
- package/test/datahike/integration_test/config_record_file_test.clj +14 -0
- package/test/datahike/integration_test/config_record_test.clj +14 -0
- package/test/datahike/integration_test/depr_config_uri_test.clj +15 -0
- package/test/datahike/integration_test/return_map_test.clj +62 -0
- package/test/datahike/integration_test.cljc +67 -0
- package/test/datahike/norm/norm_test.clj +124 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/001-a1-example.edn +5 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/002-a2-example.edn +5 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/003-tx-fn-test.edn +1 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/004-tx-data-and-tx-fn-test.edn +5 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/01-transact-basic-characters.edn +2 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/02 add occupation.edn +5 -0
- package/test/datahike/norm/resources/naming-and-sorting-test/checksums.edn +12 -0
- package/test/datahike/norm/resources/simple-test/001-a1-example.edn +5 -0
- package/test/datahike/norm/resources/simple-test/002-a2-example.edn +5 -0
- package/test/datahike/norm/resources/simple-test/checksums.edn +4 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/001-a1-example.edn +5 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/002-a2-example.edn +5 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/003-tx-fn-test.edn +1 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/checksums.edn +6 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/second/004-tx-data-and-tx-fn-test.edn +5 -0
- package/test/datahike/norm/resources/tx-data-and-tx-fn-test/second/checksums.edn +2 -0
- package/test/datahike/norm/resources/tx-fn-test/first/001-a1-example.edn +5 -0
- package/test/datahike/norm/resources/tx-fn-test/first/002-a2-example.edn +5 -0
- package/test/datahike/norm/resources/tx-fn-test/first/checksums.edn +4 -0
- package/test/datahike/norm/resources/tx-fn-test/second/003-tx-fn-test.edn +1 -0
- package/test/datahike/norm/resources/tx-fn-test/second/checksums.edn +2 -0
- package/test/datahike/test/api_test.cljc +895 -0
- package/test/datahike/test/array_test.cljc +40 -0
- package/test/datahike/test/attribute_refs/datoms_test.cljc +140 -0
- package/test/datahike/test/attribute_refs/db_test.cljc +42 -0
- package/test/datahike/test/attribute_refs/differences_test.cljc +515 -0
- package/test/datahike/test/attribute_refs/entity_test.cljc +89 -0
- package/test/datahike/test/attribute_refs/pull_api_test.cljc +320 -0
- package/test/datahike/test/attribute_refs/query_find_specs_test.cljc +59 -0
- package/test/datahike/test/attribute_refs/query_fns_test.cljc +130 -0
- package/test/datahike/test/attribute_refs/query_interop_test.cljc +47 -0
- package/test/datahike/test/attribute_refs/query_not_test.cljc +193 -0
- package/test/datahike/test/attribute_refs/query_or_test.cljc +137 -0
- package/test/datahike/test/attribute_refs/query_pull_test.cljc +156 -0
- package/test/datahike/test/attribute_refs/query_rules_test.cljc +176 -0
- package/test/datahike/test/attribute_refs/query_test.cljc +241 -0
- package/test/datahike/test/attribute_refs/temporal_search.cljc +22 -0
- package/test/datahike/test/attribute_refs/transact_test.cljc +220 -0
- package/test/datahike/test/attribute_refs/utils.cljc +128 -0
- package/test/datahike/test/cache_test.cljc +38 -0
- package/test/datahike/test/components_test.cljc +92 -0
- package/test/datahike/test/config_test.cljc +158 -0
- package/test/datahike/test/core_test.cljc +105 -0
- package/test/datahike/test/datom_test.cljc +44 -0
- package/test/datahike/test/db_test.cljc +54 -0
- package/test/datahike/test/entity_spec_test.cljc +159 -0
- package/test/datahike/test/entity_test.cljc +103 -0
- package/test/datahike/test/explode_test.cljc +143 -0
- package/test/datahike/test/filter_test.cljc +75 -0
- package/test/datahike/test/gc_test.cljc +159 -0
- package/test/datahike/test/http/server_test.clj +192 -0
- package/test/datahike/test/http/writer_test.clj +86 -0
- package/test/datahike/test/ident_test.cljc +32 -0
- package/test/datahike/test/index_test.cljc +345 -0
- package/test/datahike/test/insert.cljc +125 -0
- package/test/datahike/test/java_bindings_test.clj +6 -0
- package/test/datahike/test/listen_test.cljc +41 -0
- package/test/datahike/test/lookup_refs_test.cljc +266 -0
- package/test/datahike/test/lru_test.cljc +27 -0
- package/test/datahike/test/migrate_test.clj +297 -0
- package/test/datahike/test/model/core.cljc +376 -0
- package/test/datahike/test/model/invariant.cljc +142 -0
- package/test/datahike/test/model/rng.cljc +82 -0
- package/test/datahike/test/model_test.clj +217 -0
- package/test/datahike/test/nodejs_test.cljs +262 -0
- package/test/datahike/test/online_gc_test.cljc +475 -0
- package/test/datahike/test/pod_test.clj +369 -0
- package/test/datahike/test/pull_api_test.cljc +474 -0
- package/test/datahike/test/purge_test.cljc +144 -0
- package/test/datahike/test/query_aggregates_test.cljc +101 -0
- package/test/datahike/test/query_find_specs_test.cljc +52 -0
- package/test/datahike/test/query_fns_test.cljc +523 -0
- package/test/datahike/test/query_interop_test.cljc +47 -0
- package/test/datahike/test/query_not_test.cljc +189 -0
- package/test/datahike/test/query_or_test.cljc +158 -0
- package/test/datahike/test/query_pull_test.cljc +147 -0
- package/test/datahike/test/query_rules_test.cljc +248 -0
- package/test/datahike/test/query_stats_test.cljc +218 -0
- package/test/datahike/test/query_test.cljc +984 -0
- package/test/datahike/test/schema_test.cljc +424 -0
- package/test/datahike/test/specification_test.cljc +30 -0
- package/test/datahike/test/store_test.cljc +78 -0
- package/test/datahike/test/stress_test.cljc +57 -0
- package/test/datahike/test/time_variance_test.cljc +518 -0
- package/test/datahike/test/tools_test.clj +134 -0
- package/test/datahike/test/transact_test.cljc +518 -0
- package/test/datahike/test/tuples_test.cljc +564 -0
- package/test/datahike/test/unstructured_test.cljc +291 -0
- package/test/datahike/test/upsert_impl_test.cljc +205 -0
- package/test/datahike/test/upsert_test.cljc +363 -0
- package/test/datahike/test/utils.cljc +110 -0
- package/test/datahike/test/validation_test.cljc +48 -0
- package/test/datahike/test/versioning_test.cljc +56 -0
- package/test/datahike/test.cljc +66 -0
- package/tests.edn +24 -0
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
(ns datahike.experimental.unstructured
|
|
2
|
+
"Experimental feature for unstructured data input through schema inference.
|
|
3
|
+
This namespace provides functions to convert unstructured EDN/JSON-like data
|
|
4
|
+
into Datahike's transaction format with automatic schema generation."
|
|
5
|
+
(:require
|
|
6
|
+
[datahike.api :as d]
|
|
7
|
+
[datahike.db.interface :as dbi]))
|
|
8
|
+
|
|
9
|
+
(defn value->type
  "Determine the Datahike valueType keyword for a single value.

  Note: `double?` is checked before `float?` because Clojure's `float?`
  returns true for `Double` instances as well, which previously made the
  `double?` branch unreachable and mistyped doubles as :db.type/float.

  nil and unknown values default to :db.type/string."
  [v]
  (cond
    (int? v) :db.type/long
    ;; double? must precede float?: (float? 1.0) is true in Clojure.
    (double? v) :db.type/double
    (float? v) :db.type/float
    (number? v) :db.type/number
    (string? v) :db.type/string
    (boolean? v) :db.type/boolean
    (keyword? v) :db.type/keyword
    (symbol? v) :db.type/symbol
    (uuid? v) :db.type/uuid
    (inst? v) :db.type/instant
    (map? v) :db.type/ref
    (bytes? v) :db.type/bytes
    ;; Handle nil values - default to string for now
    (nil? v) :db.type/string
    :else :db.type/string))
|
|
28
|
+
|
|
29
|
+
(defn infer-value-schema
  "Build a schema entry map for attribute `attr` holding value `v`.

  Vectors yield a cardinality-many entry typed from their first element
  (empty vectors yield nil, since no type can be inferred). Maps yield a
  cardinality-one ref entry; any other value yields a cardinality-one
  entry typed via `value->type`."
  [attr v]
  (letfn [(entry [vtype card]
            {:db/ident attr
             :db/valueType vtype
             :db/cardinality card})]
    (cond
      (vector? v) (when (seq v)
                    (entry (value->type (first v)) :db.cardinality/many))
      (map? v)    (entry :db.type/ref :db.cardinality/one)
      :else       (entry (value->type v) :db.cardinality/one))))
|
|
48
|
+
|
|
49
|
+
(defn process-unstructured-data
  "Process unstructured data recursively, converting it to Datahike transactions,
  while inferring schema. Returns a map with :schema and :tx-data.

  Walks `data` depth-first. Every map encountered is assigned a fresh
  negative temp id under :db/id and conjed onto the transaction data;
  the temp id is returned in place of the nested map, so parent entities
  reference children by temp id (children are conjed before their
  parents). Schema entries are inferred per key via `infer-value-schema`;
  nil entries (e.g. from empty vectors) are dropped and duplicates
  collapsed before returning."
  [data]
  (let [res (atom [])
        temp-id (atom 0)
        schema (atom [])]
    ((fn eval-data [data]
       (cond
         (map? data)
         (let [new-map (into {} (map (fn [[k v]]
                                       ;; record inferred schema for this attribute
                                       (when-let [schema-entry (infer-value-schema k v)]
                                         (swap! schema conj schema-entry))
                                       ;; recurse so nested maps are replaced by temp-id refs
                                       [k (eval-data v)])
                                     data))
               ;; fresh negative temp id for this entity
               map-id (swap! temp-id dec)
               new-map (assoc new-map :db/id map-id)]
           (swap! res conj new-map)
           map-id)

         (vector? data)
         (mapv eval-data data)

         :else
         data))
     data)
    {:schema (vec (distinct (remove nil? @schema)))
     :tx-data @res}))
|
|
77
|
+
|
|
78
|
+
(defn check-schema-compatibility
  "Check if the inferred schema is compatible with the database's existing schema.
  Returns a map with :compatible? and :conflicts.

  For every inferred attribute that already exists in the db schema, a
  :value-type conflict is recorded when the value types differ and a
  :cardinality conflict when the cardinalities differ."
  [db inferred-schema]
  (let [db-schema (dbi/-schema db)
        conflicts
        (vec
         (mapcat
          (fn [{:keys [db/ident db/valueType db/cardinality]}]
            (when-let [existing (get db-schema ident)]
              (concat
               (when (not= (:db/valueType existing) valueType)
                 [{:attr ident
                   :conflict :value-type
                   :existing (:db/valueType existing)
                   :inferred valueType}])
               (when (not= (:db/cardinality existing) cardinality)
                 [{:attr ident
                   :conflict :cardinality
                   :existing (:db/cardinality existing)
                   :inferred cardinality}]))))
          inferred-schema))]
    {:compatible? (empty? conflicts)
     :conflicts conflicts}))
|
|
98
|
+
|
|
99
|
+
(defn prepare-transaction
  "Prepare a transaction for unstructured data based on the database configuration.
  For schema-on-read databases, simply returns the transaction data.
  For schema-on-write databases, adds necessary schema definitions first.

  Throws ex-info when the inferred schema conflicts with the database's
  existing schema (schema-on-write only)."
  [db data]
  (let [{:keys [schema tx-data]} (process-unstructured-data data)
        flexibility (get-in (dbi/-config db) [:schema-flexibility])]
    (if (= flexibility :read)
      ;; schema-on-read: no schema transaction needed
      tx-data
      ;; schema-on-write: verify compatibility, then prepend schema
      (let [{:keys [compatible? conflicts]} (check-schema-compatibility db schema)]
        (if compatible?
          (concat schema tx-data)
          (throw (ex-info "Schema conflict detected with existing database schema"
                          {:conflicts conflicts
                           :inferred-schema schema})))))))
|
|
118
|
+
|
|
119
|
+
(defn transact-unstructured
  "Public API for transacting unstructured data. Takes a connection and unstructured data.
  Automatically determines required schema and applies it before inserting the data."
  [conn data]
  ;; Derive schema + tx-data from the current db value, then hand the
  ;; result to the standard transaction API.
  (d/transact conn (prepare-transaction @conn data)))
|
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
(ns datahike.experimental.versioning
|
|
2
|
+
"Git-like versioning tools for Datahike."
|
|
3
|
+
(:require [konserve.core :as k]
|
|
4
|
+
[datahike.connections :refer [delete-connection!]]
|
|
5
|
+
[datahike.core :refer [with]]
|
|
6
|
+
[datahike.store :refer [store-identity]]
|
|
7
|
+
[datahike.writing :refer [stored->db db->stored stored-db?
|
|
8
|
+
complete-db-update commit! create-commit-id get-and-clear-pending-kvs!
|
|
9
|
+
write-pending-kvs!]]
|
|
10
|
+
[superv.async :refer [<? S go-loop-try]]
|
|
11
|
+
[datahike.db.utils :refer [db?]]
|
|
12
|
+
[datahike.tools :as dt]
|
|
13
|
+
[konserve.utils :refer [multi-key-capable?]]))
|
|
14
|
+
|
|
15
|
+
(defn- branch-check
  "Raise when `branch` is not a keyword — branches are addressed by
  keyword in the store."
  [branch]
  (when-not (keyword? branch)
    ;; :type was previously :branch-must-be-uuid, an apparent copy-paste
    ;; from commit-id-check; corrected to match the actual constraint.
    (dt/raise "Branch must be a keyword."
              {:type :branch-must-be-keyword :branch branch})))
|
|
18
|
+
|
|
19
|
+
(defn- db-check
  "Raise unless `db` is a Datahike database value (as per `db?`)."
  [db]
  (when-not (db? db)
    (dt/raise "You must provide a DB value."
              {:type :db-value-required :db db})))
|
|
22
|
+
|
|
23
|
+
(defn- parent-check
  "Raise unless at least one parent branch/commit is provided."
  [parents]
  (when (zero? (count parents))
    (dt/raise "You must provide at least one parent."
              {:type :must-provide-at-least-one-parent :parents parents})))
|
|
27
|
+
|
|
28
|
+
(defn- commit-id-check
  "Raise unless `commit-id` is a uuid."
  [commit-id]
  (when-not (uuid? commit-id)
    (dt/raise "Commit-id must be a uuid."
              {:type :commit-id-must-be-uuid :commit-id commit-id})))
|
|
32
|
+
|
|
33
|
+
;; ========================= public API =========================
|
|
34
|
+
|
|
35
|
+
(defn branch-history
  "Returns a go-channel with the commit history of the branch of the connection in
  form of all stored db values. Performs backtracking and returns dbs in order.

  Starts from the connection's current branch key and repeatedly follows
  the :datahike/parents links found in each stored db's :meta. Already
  visited keys are skipped, since histories can share ancestors after
  merges. Traversal ends when the worklist is exhausted or a key does
  not resolve in the store."
  [conn]
  (let [{:keys [store] {:keys [branch]} :config} @conn]
    (go-loop-try S [[to-check & r] [branch]
                    visited #{}
                    reachable []]
                 (if to-check
                   (if (visited to-check) ;; skip
                     (recur r visited reachable)
                     (if-let [raw-db (<? S (k/get store to-check))]
                       (let [{{:keys [datahike/parents]} :meta
                              :as db} (stored->db raw-db store)]
                         (recur (concat r parents)
                                (conj visited to-check)
                                (conj reachable db)))
                       reachable))
                   reachable))))
|
|
54
|
+
|
|
55
|
+
(defn branch!
  "Create a new branch from commit-id or existing branch as new-branch.

  Fails if `new-branch` already exists or if `from` does not resolve to
  a stored db in the store. Copies the stored db under the new branch
  key (with :config :branch rewritten) and registers the branch in the
  :branches set. All store operations run synchronously."
  [conn from new-branch]
  (let [store (:store @conn)
        branches (k/get store :branches nil {:sync? true})
        ;; reject duplicates before touching the store
        _ (when (branches new-branch)
            (dt/raise "Branch already exists." {:type :branch-already-exists
                                                :new-branch new-branch}))
        db (k/get store from nil {:sync? true})]
    (when-not (stored-db? db)
      (throw (ex-info "From does not point to an existing branch or commit."
                      {:type :from-branch-does-not-point-to-existing-branch-or-commit
                       :from from})))
    ;; write the new branch head first, then register it
    (k/assoc store new-branch (assoc-in db [:config :branch] new-branch) {:sync? true})
    (k/update store :branches #(conj (set %) new-branch) {:sync? true})))
|
|
70
|
+
|
|
71
|
+
(defn delete-branch!
  "Removes this branch from set of known branches. The branch will still be
  accessible until the next gc. Remote readers need to release their connections.

  The main :db branch cannot be deleted; delete the database instead.
  Also drops the cached connection for this store/branch pair so stale
  connections do not keep serving the removed branch."
  [conn branch]
  (when (= branch :db)
    (dt/raise "Cannot delete main :db branch. Delete database instead."
              {:type :cannot-delete-main-db-branch}))
  (let [store (:store @conn)
        branches (k/get store :branches nil {:sync? true})]
    (when-not (branches branch)
      (dt/raise "Branch does not exist." {:type :branch-does-not-exist
                                          :branch branch}))
    ;; evict the cached connection before unregistering the branch
    (delete-connection! [(store-identity (get-in @conn [:config :store])) branch])
    (k/update store :branches #(disj (set %) branch) {:sync? true})))
|
|
85
|
+
|
|
86
|
+
(defn force-branch!
  "Force the branch to point to the provided db value. Branch will be created if
  it does not exist. Parents need to point to a set of branches or commits.

  Be careful with this command as you can render a db inaccessible by corrupting
  a branch. You will also conflict with existing connections to the branch so
  make sure to close them before forcing the branch.

  Writes synchronously: the db value (stamped with branch, parents and a
  fresh commit-id) is stored both under the commit id and as the branch
  head, together with any pending index KVs and schema meta."
  [db branch parents]
  (db-check db)
  (branch-check branch)
  (parent-check parents)
  (let [store (:store db)
        cid (create-commit-id db)
        db-with-meta (-> db
                         (assoc-in [:config :branch] branch)
                         (assoc-in [:meta :datahike/parents] parents)
                         (assoc-in [:meta :datahike/commit-id] cid))
        ;; db->stored now returns [schema-meta-kv-to-write db-to-store]
        ;; and index flushes will have populated pending-writes
        [schema-meta-kv-to-write db-to-store] (db->stored db-with-meta true)
        ;; Get all pending [k v] pairs (e.g., from index flushes)
        pending-kvs (get-and-clear-pending-kvs! store)]

    ;; Update the set of known branches
    (k/update store :branches #(conj (set %) branch) {:sync? true})

    ;; Write all data synchronously
    (if (multi-key-capable? store)
      ;; store supports multi-key writes: bundle pending KVs, optional
      ;; schema meta, the commit and the branch head into one multi-assoc
      (let [writes-map (cond-> (into {} pending-kvs) ; Initialize with pending KVs
                         schema-meta-kv-to-write (assoc (first schema-meta-kv-to-write) (second schema-meta-kv-to-write))
                         true (assoc cid db-to-store)
                         true (assoc branch db-to-store))]
        (k/multi-assoc store writes-map {:sync? true}))
      (do
        ;; Use the helper function to write pending KVs (synchronously)
        (write-pending-kvs! store pending-kvs true)
        ;; Then write schema-meta
        (when schema-meta-kv-to-write
          (k/assoc store (first schema-meta-kv-to-write) (second schema-meta-kv-to-write) {:sync? true}))
        ;; Then write commit-log and branch head
        (k/assoc store cid db-to-store {:sync? true})
        (k/assoc store branch db-to-store {:sync? true})))
    nil))
|
|
129
|
+
|
|
130
|
+
(defn commit-id
  "Retrieve the commit-id for this db."
  [db]
  (db-check db)
  (-> db :meta :datahike/commit-id))
|
|
135
|
+
|
|
136
|
+
(defn parent-commit-ids
  "Retrieve parent commit ids from db."
  [db]
  (db-check db)
  (-> db :meta :datahike/parents))
|
|
141
|
+
|
|
142
|
+
(defn commit-as-db
  "Loads the database stored at this commit id, or nil when absent."
  [store commit-id]
  (commit-id-check commit-id)
  (some-> (k/get store commit-id nil {:sync? true})
          (stored->db store)))
|
|
148
|
+
|
|
149
|
+
(defn branch-as-db
  "Loads the database stored at this branch, or nil when absent."
  [store branch]
  (branch-check branch)
  (some-> (k/get store branch nil {:sync? true})
          (stored->db store)))
|
|
155
|
+
|
|
156
|
+
(defn merge!
  "Create a merge commit to the current branch of this connection for parent
  commit uuids. It is the responsibility of the caller to make sure that tx-data
  contains the data to be merged into the branch from the parents. This function
  ensures that the parent commits are properly tracked.

  NOTE: Currently merge! requires that you release all connections to conn and
  reconnect afterwards to reset the writer state. This will be fixed in the
  future by handling merge! through the writer."
  ([conn parents tx-data]
   (merge! conn parents tx-data nil))
  ([conn parents tx-data tx-meta]
   (parent-check parents)
   (let [old @conn
         ;; apply tx-data against the current value, then finalize the
         ;; resulting db update before committing
         db (:db-after (complete-db-update old (with old tx-data tx-meta)))
         ;; the current branch is always recorded as an additional parent
         parents (conj parents (get-in old [:config :branch]))
         commit-db (commit! db parents)]
     (reset! conn commit-db)
     true)))
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
// Closure-compiler externs for Datahike's JavaScript API surface.
// These declarations only prevent advanced optimizations from renaming
// the listed properties; they carry no runtime behavior.
var datahike = {};
datahike.db = {};

/**
 * @constructor
 */
datahike.db.Datom = function() {};
// Datom fields exposed to JS consumers.
datahike.db.Datom.prototype.e;
datahike.db.Datom.prototype.a;
datahike.db.Datom.prototype.v;
datahike.db.Datom.prototype.tx;


datahike.impl = {};
datahike.impl.entity = {};

/**
 * @constructor
 */
datahike.impl.entity.Entity = function() {};
datahike.impl.entity.Entity.prototype.db;
datahike.impl.entity.Entity.prototype.eid;
// Map-like accessor methods exposed on Entity.
datahike.impl.entity.Entity.prototype.keys = function() {};
datahike.impl.entity.Entity.prototype.entries = function() {};
datahike.impl.entity.Entity.prototype.values = function() {};
datahike.impl.entity.Entity.prototype.has = function() {};
datahike.impl.entity.Entity.prototype.get = function() {};
datahike.impl.entity.Entity.prototype.forEach = function() {};
datahike.impl.entity.Entity.prototype.key_set = function() {};
datahike.impl.entity.Entity.prototype.entry_set = function() {};
datahike.impl.entity.Entity.prototype.value_set = function() {};
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
(ns datahike.gc
|
|
2
|
+
(:require [clojure.set :as set]
|
|
3
|
+
[datahike.index.interface :refer [-mark]]
|
|
4
|
+
[konserve.core :as k]
|
|
5
|
+
[konserve.gc :refer [sweep!]]
|
|
6
|
+
[taoensso.timbre :refer [debug trace]]
|
|
7
|
+
[superv.async :refer [<? S go-try <<?]]
|
|
8
|
+
[clojure.core.async :as async]
|
|
9
|
+
[datahike.schema-cache :as sc])
|
|
10
|
+
#?(:clj (:import [java.util Date])))
|
|
11
|
+
|
|
12
|
+
;; meta-data does not get passed in macros
|
|
13
|
+
(defn get-time [d]
|
|
14
|
+
(.getTime ^Date d))
|
|
15
|
+
|
|
16
|
+
(defn- reachable-in-branch [store branch after-date config]
|
|
17
|
+
(go-try S
|
|
18
|
+
(let [head-cid (<? S (k/get-in store [branch :meta :datahike/commit-id]))]
|
|
19
|
+
(loop [[to-check & r] [branch]
|
|
20
|
+
visited #{}
|
|
21
|
+
reachable #{branch head-cid}]
|
|
22
|
+
(if to-check
|
|
23
|
+
(if (visited to-check) ;; skip
|
|
24
|
+
(recur r visited reachable)
|
|
25
|
+
(let [{:keys [eavt-key avet-key aevt-key
|
|
26
|
+
temporal-eavt-key temporal-avet-key temporal-aevt-key
|
|
27
|
+
schema-meta-key]
|
|
28
|
+
{:keys [datahike/parents
|
|
29
|
+
datahike/created-at
|
|
30
|
+
datahike/updated-at]} :meta}
|
|
31
|
+
(<? S (k/get store to-check))
|
|
32
|
+
in-range? (> (get-time (or updated-at created-at))
|
|
33
|
+
(get-time after-date))]
|
|
34
|
+
(recur (concat r (when in-range? parents))
|
|
35
|
+
(conj visited to-check)
|
|
36
|
+
(set/union reachable #{to-check}
|
|
37
|
+
(when schema-meta-key #{schema-meta-key})
|
|
38
|
+
(-mark eavt-key)
|
|
39
|
+
(-mark aevt-key)
|
|
40
|
+
(-mark avet-key)
|
|
41
|
+
(when (:keep-history? config)
|
|
42
|
+
(set/union
|
|
43
|
+
(-mark temporal-eavt-key)
|
|
44
|
+
(-mark temporal-aevt-key)
|
|
45
|
+
(-mark temporal-avet-key)))))))
|
|
46
|
+
reachable)))))
|
|
47
|
+
|
|
48
|
+
(defn gc-storage!
|
|
49
|
+
"Invokes garbage collection on the database by whitelisting currently known branches.
|
|
50
|
+
All db snapshots on these branches before remove-before date will also be
|
|
51
|
+
erased (defaults to beginning of time [no erasure]). The branch heads will
|
|
52
|
+
always be retained."
|
|
53
|
+
([db] (gc-storage! db (#?(:clj Date. :cljs js/Date.) 0)))
|
|
54
|
+
([db remove-before]
|
|
55
|
+
(go-try S
|
|
56
|
+
(let [now #?(:clj (Date.) :cljs (js/Date.))
|
|
57
|
+
_ (debug "starting gc" now)
|
|
58
|
+
{:keys [config store]} db
|
|
59
|
+
_ (sc/clear-write-cache (:store config)) ; Clear the schema write cache for this store
|
|
60
|
+
branches (<? S (k/get store :branches))
|
|
61
|
+
_ (trace "retaining branches" branches)
|
|
62
|
+
reachable (->> branches
|
|
63
|
+
(map #(reachable-in-branch store % remove-before config))
|
|
64
|
+
async/merge
|
|
65
|
+
(<<? S)
|
|
66
|
+
(apply set/union))
|
|
67
|
+
reachable (conj reachable :branches)]
|
|
68
|
+
(trace "gc reached: " reachable)
|
|
69
|
+
(<? S (sweep! store reachable now))))))
|
|
@@ -0,0 +1,188 @@
|
|
|
1
|
+
(ns datahike.http.client
|
|
2
|
+
(:refer-clojure :exclude [filter])
|
|
3
|
+
(:require [babashka.http-client :as http]
|
|
4
|
+
[cognitect.transit :as transit]
|
|
5
|
+
[jsonista.core :as j]
|
|
6
|
+
[hasch.core :refer [uuid]]
|
|
7
|
+
[datahike.api.specification :as api]
|
|
8
|
+
[clojure.edn :as edn]
|
|
9
|
+
[datahike.datom :as dd]
|
|
10
|
+
[datahike.remote :as remote]
|
|
11
|
+
[datahike.impl.entity :as de]
|
|
12
|
+
[taoensso.timbre :as log])
|
|
13
|
+
(:import [java.io ByteArrayOutputStream]))
|
|
14
|
+
|
|
15
|
+
(def MEGABYTE (* 1024 1024))
|
|
16
|
+
|
|
17
|
+
(def MAX_OUTPUT_BUFFER_SIZE (* 4 MEGABYTE))
|
|
18
|
+
|
|
19
|
+
(defn request-edn [method end-point remote-peer data]
|
|
20
|
+
(let [{:keys [url token]} remote-peer
|
|
21
|
+
fmt "application/edn"
|
|
22
|
+
url (str url "/" end-point)
|
|
23
|
+
body (remote/edn-replace-remote-literals (pr-str data))
|
|
24
|
+
_ (log/trace "request" url end-point token data body)
|
|
25
|
+
response
|
|
26
|
+
(try
|
|
27
|
+
(http/request (merge
|
|
28
|
+
{:uri url
|
|
29
|
+
:method method
|
|
30
|
+
:headers (merge {:content-type fmt
|
|
31
|
+
:accept fmt}
|
|
32
|
+
(when token
|
|
33
|
+
{:authorization (str "token " token)}))
|
|
34
|
+
:body body}
|
|
35
|
+
(when (= method :get)
|
|
36
|
+
{:query-params {"args-id" (uuid data)}})))
|
|
37
|
+
(catch Exception e
|
|
38
|
+
(let [msg (ex-message e)
|
|
39
|
+
data (ex-data e)
|
|
40
|
+
new-data
|
|
41
|
+
(update data :body #(edn/read-string {:readers remote/edn-readers} %))]
|
|
42
|
+
(throw (ex-info msg new-data)))))
|
|
43
|
+
response (:body response)]
|
|
44
|
+
(log/trace "response" response)
|
|
45
|
+
(edn/read-string {:readers remote/edn-readers} response)))
|
|
46
|
+
|
|
47
|
+
(defn request-transit
|
|
48
|
+
([method end-point remote-peer data]
|
|
49
|
+
(request-transit method end-point remote-peer data
|
|
50
|
+
remote/transit-read-handlers
|
|
51
|
+
remote/transit-write-handlers))
|
|
52
|
+
([method end-point remote-peer data read-handlers write-handlers]
|
|
53
|
+
(let [{:keys [url token max-output-buffer-size]}
|
|
54
|
+
remote-peer
|
|
55
|
+
fmt "application/transit+json"
|
|
56
|
+
url (str url "/" end-point)
|
|
57
|
+
out (ByteArrayOutputStream. (or max-output-buffer-size MAX_OUTPUT_BUFFER_SIZE))
|
|
58
|
+
writer (transit/writer out :json {:handlers write-handlers})
|
|
59
|
+
_ (transit/write writer data)
|
|
60
|
+
_ (log/trace "request" url end-point token data out)
|
|
61
|
+
response
|
|
62
|
+
(try
|
|
63
|
+
(http/request (merge
|
|
64
|
+
{:method method
|
|
65
|
+
:uri url
|
|
66
|
+
:headers
|
|
67
|
+
(merge {:content-type fmt
|
|
68
|
+
:accept fmt}
|
|
69
|
+
(when token
|
|
70
|
+
{:authorization (str "token " token)}))
|
|
71
|
+
:as :stream
|
|
72
|
+
:body (.toByteArray out)}
|
|
73
|
+
(when (= method :get)
|
|
74
|
+
{:query-params {"args-id" (uuid data)}})))
|
|
75
|
+
(catch Exception e
|
|
76
|
+
;; read exception
|
|
77
|
+
(let [msg (ex-message e)
|
|
78
|
+
data (ex-data e)
|
|
79
|
+
new-data
|
|
80
|
+
(update data :body
|
|
81
|
+
#(when %
|
|
82
|
+
(transit/read (transit/reader % :json {:handlers read-handlers}))))]
|
|
83
|
+
(throw (ex-info msg new-data)))))
|
|
84
|
+
response (:body response)
|
|
85
|
+
response (transit/read (transit/reader response :json {:handlers read-handlers}))]
|
|
86
|
+
(log/trace "response" response)
|
|
87
|
+
response)))
|
|
88
|
+
|
|
89
|
+
(defn request-json
|
|
90
|
+
([method end-point remote-peer data]
|
|
91
|
+
(request-json method end-point remote-peer data remote/json-mapper))
|
|
92
|
+
([method end-point remote-peer data mapper]
|
|
93
|
+
(let [{:keys [url token]}
|
|
94
|
+
remote-peer
|
|
95
|
+
fmt "application/json"
|
|
96
|
+
url (str url "/" end-point)
|
|
97
|
+
out (j/write-value-as-bytes data mapper)
|
|
98
|
+
_ (log/trace "request" url end-point token data out)
|
|
99
|
+
response
|
|
100
|
+
(try
|
|
101
|
+
(http/request (merge
|
|
102
|
+
{:method method
|
|
103
|
+
:uri url
|
|
104
|
+
:headers
|
|
105
|
+
(merge {:content-type fmt
|
|
106
|
+
:accept fmt}
|
|
107
|
+
(when token
|
|
108
|
+
{:authorization (str "token " token)}))
|
|
109
|
+
:as :stream
|
|
110
|
+
:body out}
|
|
111
|
+
(when (= method :get)
|
|
112
|
+
{:query-params {"args-id" (uuid data)}})))
|
|
113
|
+
(catch Exception e
|
|
114
|
+
;; read exception
|
|
115
|
+
(let [msg (if-let [m (ex-message e)] m "Nothing returned. Is the server reachable?")
|
|
116
|
+
data (ex-data e)
|
|
117
|
+
new-data
|
|
118
|
+
(update data :body
|
|
119
|
+
#(when %
|
|
120
|
+
(j/read-value % mapper)))]
|
|
121
|
+
(throw (ex-info (or (:msg (:body new-data)) msg)
|
|
122
|
+
(or (:ex-data (:body new-data)) new-data))))))
|
|
123
|
+
response (:body response)
|
|
124
|
+
response (j/read-value response mapper)]
|
|
125
|
+
(log/trace "response" response)
|
|
126
|
+
response)))
|
|
127
|
+
|
|
128
|
+
(defn request-json-raw [method end-point remote-peer data]
|
|
129
|
+
(let [{:keys [url token]}
|
|
130
|
+
remote-peer
|
|
131
|
+
fmt "application/json"
|
|
132
|
+
url (str url "/" end-point)
|
|
133
|
+
out data
|
|
134
|
+
_ (log/trace "request" url end-point token data out)
|
|
135
|
+
response
|
|
136
|
+
(http/request (merge
|
|
137
|
+
{:method method
|
|
138
|
+
:uri url
|
|
139
|
+
:headers
|
|
140
|
+
(merge {:content-type fmt
|
|
141
|
+
:accept fmt}
|
|
142
|
+
(when token
|
|
143
|
+
{:authorization (str "token " token)}))
|
|
144
|
+
:as :stream
|
|
145
|
+
:body out}
|
|
146
|
+
(when (= method :get)
|
|
147
|
+
{:query-params {"args-id" (uuid data)}})))
|
|
148
|
+
response (slurp (:body response))]
|
|
149
|
+
(log/trace "response" response)
|
|
150
|
+
response))
|
|
151
|
+
|
|
152
|
+
(defn get-remote [args]
|
|
153
|
+
(let [remotes (disj
|
|
154
|
+
(into
|
|
155
|
+
;; first arguments can be config maps, e.g. for
|
|
156
|
+
;; create-database; this code could be made explicit by
|
|
157
|
+
;; explicitly dispatching on when the first argument is a
|
|
158
|
+
;; config map
|
|
159
|
+
#{(:remote-peer (first args))}
|
|
160
|
+
;; other arguments to API follow hygiene
|
|
161
|
+
(map remote/remote-peer (rest args)))
|
|
162
|
+
nil)]
|
|
163
|
+
(if (> (count remotes) 1)
|
|
164
|
+
(throw (ex-info "Arguments refer to more than one remote-peer." {:remotes remotes
|
|
165
|
+
:args args}))
|
|
166
|
+
(first remotes))))
|
|
167
|
+
|
|
168
|
+
(doseq [[n {:keys [args doc supports-remote? referentially-transparent?]}] api/api-specification]
|
|
169
|
+
(eval
|
|
170
|
+
`(def
|
|
171
|
+
~(with-meta n
|
|
172
|
+
{:arglists `(api/malli-schema->argslist (quote ~args))
|
|
173
|
+
:doc doc})
|
|
174
|
+
(fn [& ~'args]
|
|
175
|
+
~(if-not supports-remote?
|
|
176
|
+
`(throw (ex-info (str ~(str n) " is not supported for remote connections.")
|
|
177
|
+
{:type :remote-not-supported
|
|
178
|
+
:function ~(str n)}))
|
|
179
|
+
`(binding [remote/*remote-peer* (get-remote ~'args)]
|
|
180
|
+
(let [format# (:format remote/*remote-peer*)]
|
|
181
|
+
(({:transit request-transit
|
|
182
|
+
:edn request-edn
|
|
183
|
+
:json request-json} (or format# :transit))
|
|
184
|
+
~(if referentially-transparent? :get :post)
|
|
185
|
+
~(api/->url n)
|
|
186
|
+
remote/*remote-peer* (vec ~'args)))))))))
|
|
187
|
+
|
|
188
|
+
(defmethod remote/remote-deref :datahike-server [conn] (db conn))
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
(ns datahike.http.writer
|
|
2
|
+
"Remote writer implementation for datahike.http.server through datahike.http.client."
|
|
3
|
+
(:require [datahike.writer :refer [PWriter create-writer create-database delete-database]]
|
|
4
|
+
[datahike.http.client :refer [request-json] :as client]
|
|
5
|
+
[datahike.json :as json]
|
|
6
|
+
[datahike.connector :as connector]
|
|
7
|
+
[datahike.tools :as dt :refer [throwable-promise]]
|
|
8
|
+
[taoensso.timbre :as log]
|
|
9
|
+
[clojure.core.async :refer [promise-chan put!]]))
|
|
10
|
+
|
|
11
|
+
(defrecord DatahikeServerWriter [remote-peer conn]
|
|
12
|
+
PWriter
|
|
13
|
+
(-dispatch! [_ arg-map]
|
|
14
|
+
(let [{:keys [op args]} arg-map
|
|
15
|
+
p (promise-chan)
|
|
16
|
+
config (:config @(:wrapped-atom conn))]
|
|
17
|
+
(log/debug "Sending operation to datahike-server:" op)
|
|
18
|
+
(log/trace "Arguments:" arg-map)
|
|
19
|
+
(put! p
|
|
20
|
+
(try
|
|
21
|
+
(request-json :post
|
|
22
|
+
(str op "-writer")
|
|
23
|
+
remote-peer
|
|
24
|
+
(vec (concat [config] args))
|
|
25
|
+
json/mapper)
|
|
26
|
+
(catch Exception e
|
|
27
|
+
e)))
|
|
28
|
+
p))
|
|
29
|
+
(-shutdown [_])
|
|
30
|
+
(-streaming? [_] false))
|
|
31
|
+
|
|
32
|
+
(defmethod create-writer :datahike-server
|
|
33
|
+
[config connection]
|
|
34
|
+
(log/debug "Creating datahike-server writer for " connection config)
|
|
35
|
+
(->DatahikeServerWriter config connection))
|
|
36
|
+
|
|
37
|
+
(defmethod create-database :datahike-server
|
|
38
|
+
[& args]
|
|
39
|
+
(let [p (throwable-promise)
|
|
40
|
+
{:keys [writer] :as config} (first args)]
|
|
41
|
+
;; redirect call to remote-peer as writer config
|
|
42
|
+
(deliver p (try (->
|
|
43
|
+
(request-json :post
|
|
44
|
+
"create-database-writer"
|
|
45
|
+
writer
|
|
46
|
+
(vec (concat [(-> config
|
|
47
|
+
(assoc :remote-peer writer)
|
|
48
|
+
(dissoc :writer))]
|
|
49
|
+
(rest args))))
|
|
50
|
+
(dissoc :remote-peer))
|
|
51
|
+
(catch Exception e
|
|
52
|
+
e)))
|
|
53
|
+
p))
|
|
54
|
+
|
|
55
|
+
(defmethod delete-database :datahike-server
|
|
56
|
+
[& args]
|
|
57
|
+
(let [p (throwable-promise)
|
|
58
|
+
{:keys [writer] :as config} (first args)]
|
|
59
|
+
;; redirect call to remote-peer as writer config
|
|
60
|
+
(deliver p (try
|
|
61
|
+
(-> (request-json :post
|
|
62
|
+
"delete-database-writer"
|
|
63
|
+
writer
|
|
64
|
+
(vec (concat [(-> config
|
|
65
|
+
(assoc :remote-peer writer)
|
|
66
|
+
(dissoc :writer))]
|
|
67
|
+
(rest args))))
|
|
68
|
+
(dissoc :remote-peer))
|
|
69
|
+
(catch Exception e
|
|
70
|
+
e)))
|
|
71
|
+
p))
|
|
72
|
+
|
|
73
|
+
;; =============================================================================
|
|
74
|
+
;; Connection Integration
|
|
75
|
+
;; =============================================================================
|
|
76
|
+
|
|
77
|
+
(defmethod connector/-connect* :datahike-server [config opts]
|
|
78
|
+
;; HTTP uses standard connection logic with async+sync behavior
|
|
79
|
+
(connector/-connect-impl* config opts))
|