datahike-browser-tests 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (324) hide show
  1. package/.circleci/config.yml +405 -0
  2. package/.circleci/scripts/gen_ci.clj +194 -0
  3. package/.cirrus.yml +60 -0
  4. package/.clj-kondo/babashka/sci/config.edn +1 -0
  5. package/.clj-kondo/babashka/sci/sci/core.clj +9 -0
  6. package/.clj-kondo/config.edn +95 -0
  7. package/.dir-locals.el +2 -0
  8. package/.github/FUNDING.yml +3 -0
  9. package/.github/ISSUE_TEMPLATE/1-bug-report.yml +68 -0
  10. package/.github/ISSUE_TEMPLATE/2-feature-request.yml +28 -0
  11. package/.github/ISSUE_TEMPLATE/config.yml +6 -0
  12. package/.github/pull_request_template.md +24 -0
  13. package/.github/workflows/native-image.yml +84 -0
  14. package/LICENSE +203 -0
  15. package/README.md +273 -0
  16. package/bb/deps.edn +9 -0
  17. package/bb/resources/github-fingerprints +3 -0
  18. package/bb/resources/native-image-tests/run-bb-pod-tests.clj +162 -0
  19. package/bb/resources/native-image-tests/run-libdatahike-tests +12 -0
  20. package/bb/resources/native-image-tests/run-native-image-tests +74 -0
  21. package/bb/resources/native-image-tests/run-python-tests +22 -0
  22. package/bb/resources/native-image-tests/testconfig.attr-refs.edn +6 -0
  23. package/bb/resources/native-image-tests/testconfig.edn +5 -0
  24. package/bb/resources/template/.settings/org.eclipse.jdt.apt.core.prefs +2 -0
  25. package/bb/resources/template/.settings/org.eclipse.jdt.core.prefs +9 -0
  26. package/bb/resources/template/.settings/org.eclipse.m2e.core.prefs +4 -0
  27. package/bb/resources/template/pom.xml +22 -0
  28. package/bb/src/tools/build.clj +132 -0
  29. package/bb/src/tools/clj_kondo.clj +32 -0
  30. package/bb/src/tools/deploy.clj +26 -0
  31. package/bb/src/tools/examples.clj +19 -0
  32. package/bb/src/tools/npm.clj +100 -0
  33. package/bb/src/tools/python.clj +14 -0
  34. package/bb/src/tools/release.clj +94 -0
  35. package/bb/src/tools/test.clj +148 -0
  36. package/bb/src/tools/version.clj +47 -0
  37. package/bb.edn +269 -0
  38. package/benchmark/src/benchmark/cli.clj +195 -0
  39. package/benchmark/src/benchmark/compare.clj +157 -0
  40. package/benchmark/src/benchmark/config.clj +316 -0
  41. package/benchmark/src/benchmark/measure.clj +187 -0
  42. package/benchmark/src/benchmark/store.clj +190 -0
  43. package/benchmark/test/benchmark/measure_test.clj +156 -0
  44. package/build.clj +30 -0
  45. package/config.edn +49 -0
  46. package/deps.edn +138 -0
  47. package/dev/sandbox.clj +82 -0
  48. package/dev/sandbox.cljs +127 -0
  49. package/dev/sandbox_benchmarks.clj +27 -0
  50. package/dev/sandbox_client.clj +87 -0
  51. package/dev/sandbox_transact_bench.clj +109 -0
  52. package/dev/user.clj +79 -0
  53. package/doc/README.md +96 -0
  54. package/doc/adl/README.md +6 -0
  55. package/doc/adl/adr-000-adr.org +28 -0
  56. package/doc/adl/adr-001-attribute-references.org +15 -0
  57. package/doc/adl/adr-002-build-tooling.org +54 -0
  58. package/doc/adl/adr-003-db-meta-data.md +52 -0
  59. package/doc/adl/adr-004-github-flow.md +40 -0
  60. package/doc/adl/adr-XYZ-template.md +30 -0
  61. package/doc/adl/index.org +3 -0
  62. package/doc/assets/datahike-logo.svg +3 -0
  63. package/doc/assets/datahiking-invoice.org +85 -0
  64. package/doc/assets/hhtree2.png +0 -0
  65. package/doc/assets/network_topology.svg +624 -0
  66. package/doc/assets/perf.png +0 -0
  67. package/doc/assets/schema_mindmap.mm +132 -0
  68. package/doc/assets/schema_mindmap.svg +970 -0
  69. package/doc/assets/temporal_index.mm +74 -0
  70. package/doc/backend-development.md +78 -0
  71. package/doc/bb-pod.md +89 -0
  72. package/doc/benchmarking.md +360 -0
  73. package/doc/bindings/edn-conversion.md +383 -0
  74. package/doc/cli.md +162 -0
  75. package/doc/cljdoc.edn +27 -0
  76. package/doc/cljs-support.md +133 -0
  77. package/doc/config.md +406 -0
  78. package/doc/contributing.md +114 -0
  79. package/doc/datalog-vs-sql.md +210 -0
  80. package/doc/datomic_differences.md +109 -0
  81. package/doc/development/pull-api-ns.md +186 -0
  82. package/doc/development/pull-frame-state-diagram.jpg +0 -0
  83. package/doc/distributed.md +566 -0
  84. package/doc/entity_spec.md +92 -0
  85. package/doc/gc.md +273 -0
  86. package/doc/java-api.md +808 -0
  87. package/doc/javascript-api.md +421 -0
  88. package/doc/libdatahike.md +86 -0
  89. package/doc/logging_and_error_handling.md +43 -0
  90. package/doc/norms.md +66 -0
  91. package/doc/schema-migration.md +85 -0
  92. package/doc/schema.md +287 -0
  93. package/doc/storage-backends.md +363 -0
  94. package/doc/store-id-refactoring.md +596 -0
  95. package/doc/time_variance.md +325 -0
  96. package/doc/unstructured.md +167 -0
  97. package/doc/versioning.md +261 -0
  98. package/examples/basic/README.md +19 -0
  99. package/examples/basic/deps.edn +6 -0
  100. package/examples/basic/docker-compose.yml +13 -0
  101. package/examples/basic/src/examples/core.clj +60 -0
  102. package/examples/basic/src/examples/schema.clj +155 -0
  103. package/examples/basic/src/examples/store.clj +60 -0
  104. package/examples/basic/src/examples/time_travel.clj +185 -0
  105. package/examples/java/.settings/org.eclipse.core.resources.prefs +3 -0
  106. package/examples/java/.settings/org.eclipse.jdt.apt.core.prefs +2 -0
  107. package/examples/java/.settings/org.eclipse.jdt.core.prefs +9 -0
  108. package/examples/java/.settings/org.eclipse.m2e.core.prefs +4 -0
  109. package/examples/java/README.md +162 -0
  110. package/examples/java/pom.xml +62 -0
  111. package/examples/java/src/main/java/examples/QuickStart.java +115 -0
  112. package/examples/java/src/main/java/examples/SchemaExample.java +148 -0
  113. package/examples/java/src/main/java/examples/TimeTravelExample.java +121 -0
  114. package/flake.lock +27 -0
  115. package/flake.nix +27 -0
  116. package/http-server/datahike/http/middleware.clj +75 -0
  117. package/http-server/datahike/http/server.clj +269 -0
  118. package/java/src/datahike/java/Database.java +274 -0
  119. package/java/src/datahike/java/Datahike.java +281 -0
  120. package/java/src/datahike/java/DatahikeGeneratedTest.java +349 -0
  121. package/java/src/datahike/java/DatahikeTest.java +370 -0
  122. package/java/src/datahike/java/EDN.java +170 -0
  123. package/java/src/datahike/java/IEntity.java +11 -0
  124. package/java/src/datahike/java/Keywords.java +161 -0
  125. package/java/src/datahike/java/SchemaFlexibility.java +52 -0
  126. package/java/src/datahike/java/Util.java +219 -0
  127. package/karma.conf.js +19 -0
  128. package/libdatahike/compile-cpp +7 -0
  129. package/libdatahike/src/datahike/impl/LibDatahikeBase.java +203 -0
  130. package/libdatahike/src/datahike/impl/libdatahike.clj +59 -0
  131. package/libdatahike/src/test_cpp.cpp +61 -0
  132. package/npm-package/PUBLISHING.md +140 -0
  133. package/npm-package/README.md +226 -0
  134. package/npm-package/package.template.json +34 -0
  135. package/npm-package/test-isomorphic.ts +281 -0
  136. package/npm-package/test.js +557 -0
  137. package/npm-package/typescript-test.ts +70 -0
  138. package/package.json +16 -0
  139. package/pydatahike/README.md +569 -0
  140. package/pydatahike/pyproject.toml +91 -0
  141. package/pydatahike/setup.py +42 -0
  142. package/pydatahike/src/datahike/__init__.py +134 -0
  143. package/pydatahike/src/datahike/_native.py +250 -0
  144. package/pydatahike/src/datahike/_version.py +2 -0
  145. package/pydatahike/src/datahike/database.py +722 -0
  146. package/pydatahike/src/datahike/edn.py +311 -0
  147. package/pydatahike/src/datahike/py.typed +0 -0
  148. package/pydatahike/tests/conftest.py +17 -0
  149. package/pydatahike/tests/test_basic.py +170 -0
  150. package/pydatahike/tests/test_database.py +51 -0
  151. package/pydatahike/tests/test_edn_conversion.py +299 -0
  152. package/pydatahike/tests/test_query.py +99 -0
  153. package/pydatahike/tests/test_schema.py +55 -0
  154. package/resources/clj-kondo.exports/io.replikativ/datahike/config.edn +5 -0
  155. package/resources/example_server.edn +4 -0
  156. package/shadow-cljs.edn +56 -0
  157. package/src/data_readers.clj +7 -0
  158. package/src/datahike/api/impl.cljc +176 -0
  159. package/src/datahike/api/specification.cljc +633 -0
  160. package/src/datahike/api/types.cljc +261 -0
  161. package/src/datahike/api.cljc +41 -0
  162. package/src/datahike/array.cljc +99 -0
  163. package/src/datahike/cli.clj +166 -0
  164. package/src/datahike/cljs.cljs +6 -0
  165. package/src/datahike/codegen/cli.clj +406 -0
  166. package/src/datahike/codegen/clj_kondo.clj +291 -0
  167. package/src/datahike/codegen/java.clj +403 -0
  168. package/src/datahike/codegen/naming.cljc +33 -0
  169. package/src/datahike/codegen/native.clj +559 -0
  170. package/src/datahike/codegen/pod.clj +488 -0
  171. package/src/datahike/codegen/python.clj +838 -0
  172. package/src/datahike/codegen/report.clj +55 -0
  173. package/src/datahike/codegen/typescript.clj +262 -0
  174. package/src/datahike/codegen/validation.clj +145 -0
  175. package/src/datahike/config.cljc +294 -0
  176. package/src/datahike/connections.cljc +16 -0
  177. package/src/datahike/connector.cljc +265 -0
  178. package/src/datahike/constants.cljc +142 -0
  179. package/src/datahike/core.cljc +297 -0
  180. package/src/datahike/datom.cljc +459 -0
  181. package/src/datahike/db/interface.cljc +119 -0
  182. package/src/datahike/db/search.cljc +305 -0
  183. package/src/datahike/db/transaction.cljc +937 -0
  184. package/src/datahike/db/utils.cljc +338 -0
  185. package/src/datahike/db.cljc +956 -0
  186. package/src/datahike/experimental/unstructured.cljc +126 -0
  187. package/src/datahike/experimental/versioning.cljc +172 -0
  188. package/src/datahike/externs.js +31 -0
  189. package/src/datahike/gc.cljc +69 -0
  190. package/src/datahike/http/client.clj +188 -0
  191. package/src/datahike/http/writer.clj +79 -0
  192. package/src/datahike/impl/entity.cljc +218 -0
  193. package/src/datahike/index/interface.cljc +93 -0
  194. package/src/datahike/index/persistent_set.cljc +469 -0
  195. package/src/datahike/index/utils.cljc +44 -0
  196. package/src/datahike/index.cljc +32 -0
  197. package/src/datahike/js/api.cljs +172 -0
  198. package/src/datahike/js/api_macros.clj +22 -0
  199. package/src/datahike/js.cljs +163 -0
  200. package/src/datahike/json.cljc +209 -0
  201. package/src/datahike/lru.cljc +146 -0
  202. package/src/datahike/migrate.clj +39 -0
  203. package/src/datahike/norm/norm.clj +245 -0
  204. package/src/datahike/online_gc.cljc +252 -0
  205. package/src/datahike/pod.clj +155 -0
  206. package/src/datahike/pull_api.cljc +325 -0
  207. package/src/datahike/query.cljc +1945 -0
  208. package/src/datahike/query_stats.cljc +88 -0
  209. package/src/datahike/readers.cljc +62 -0
  210. package/src/datahike/remote.cljc +218 -0
  211. package/src/datahike/schema.cljc +228 -0
  212. package/src/datahike/schema_cache.cljc +42 -0
  213. package/src/datahike/spec.cljc +101 -0
  214. package/src/datahike/store.cljc +80 -0
  215. package/src/datahike/tools.cljc +308 -0
  216. package/src/datahike/transit.cljc +80 -0
  217. package/src/datahike/writer.cljc +239 -0
  218. package/src/datahike/writing.cljc +362 -0
  219. package/src/deps.cljs +1 -0
  220. package/src-hitchhiker-tree/datahike/index/hitchhiker_tree/insert.cljc +76 -0
  221. package/src-hitchhiker-tree/datahike/index/hitchhiker_tree/upsert.cljc +128 -0
  222. package/src-hitchhiker-tree/datahike/index/hitchhiker_tree.cljc +213 -0
  223. package/test/datahike/backward_compatibility_test/src/backward_test.clj +37 -0
  224. package/test/datahike/integration_test/config_record_file_test.clj +14 -0
  225. package/test/datahike/integration_test/config_record_test.clj +14 -0
  226. package/test/datahike/integration_test/depr_config_uri_test.clj +15 -0
  227. package/test/datahike/integration_test/return_map_test.clj +62 -0
  228. package/test/datahike/integration_test.cljc +67 -0
  229. package/test/datahike/norm/norm_test.clj +124 -0
  230. package/test/datahike/norm/resources/naming-and-sorting-test/001-a1-example.edn +5 -0
  231. package/test/datahike/norm/resources/naming-and-sorting-test/002-a2-example.edn +5 -0
  232. package/test/datahike/norm/resources/naming-and-sorting-test/003-tx-fn-test.edn +1 -0
  233. package/test/datahike/norm/resources/naming-and-sorting-test/004-tx-data-and-tx-fn-test.edn +5 -0
  234. package/test/datahike/norm/resources/naming-and-sorting-test/01-transact-basic-characters.edn +2 -0
  235. package/test/datahike/norm/resources/naming-and-sorting-test/02 add occupation.edn +5 -0
  236. package/test/datahike/norm/resources/naming-and-sorting-test/checksums.edn +12 -0
  237. package/test/datahike/norm/resources/simple-test/001-a1-example.edn +5 -0
  238. package/test/datahike/norm/resources/simple-test/002-a2-example.edn +5 -0
  239. package/test/datahike/norm/resources/simple-test/checksums.edn +4 -0
  240. package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/001-a1-example.edn +5 -0
  241. package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/002-a2-example.edn +5 -0
  242. package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/003-tx-fn-test.edn +1 -0
  243. package/test/datahike/norm/resources/tx-data-and-tx-fn-test/first/checksums.edn +6 -0
  244. package/test/datahike/norm/resources/tx-data-and-tx-fn-test/second/004-tx-data-and-tx-fn-test.edn +5 -0
  245. package/test/datahike/norm/resources/tx-data-and-tx-fn-test/second/checksums.edn +2 -0
  246. package/test/datahike/norm/resources/tx-fn-test/first/001-a1-example.edn +5 -0
  247. package/test/datahike/norm/resources/tx-fn-test/first/002-a2-example.edn +5 -0
  248. package/test/datahike/norm/resources/tx-fn-test/first/checksums.edn +4 -0
  249. package/test/datahike/norm/resources/tx-fn-test/second/003-tx-fn-test.edn +1 -0
  250. package/test/datahike/norm/resources/tx-fn-test/second/checksums.edn +2 -0
  251. package/test/datahike/test/api_test.cljc +895 -0
  252. package/test/datahike/test/array_test.cljc +40 -0
  253. package/test/datahike/test/attribute_refs/datoms_test.cljc +140 -0
  254. package/test/datahike/test/attribute_refs/db_test.cljc +42 -0
  255. package/test/datahike/test/attribute_refs/differences_test.cljc +515 -0
  256. package/test/datahike/test/attribute_refs/entity_test.cljc +89 -0
  257. package/test/datahike/test/attribute_refs/pull_api_test.cljc +320 -0
  258. package/test/datahike/test/attribute_refs/query_find_specs_test.cljc +59 -0
  259. package/test/datahike/test/attribute_refs/query_fns_test.cljc +130 -0
  260. package/test/datahike/test/attribute_refs/query_interop_test.cljc +47 -0
  261. package/test/datahike/test/attribute_refs/query_not_test.cljc +193 -0
  262. package/test/datahike/test/attribute_refs/query_or_test.cljc +137 -0
  263. package/test/datahike/test/attribute_refs/query_pull_test.cljc +156 -0
  264. package/test/datahike/test/attribute_refs/query_rules_test.cljc +176 -0
  265. package/test/datahike/test/attribute_refs/query_test.cljc +241 -0
  266. package/test/datahike/test/attribute_refs/temporal_search.cljc +22 -0
  267. package/test/datahike/test/attribute_refs/transact_test.cljc +220 -0
  268. package/test/datahike/test/attribute_refs/utils.cljc +128 -0
  269. package/test/datahike/test/cache_test.cljc +38 -0
  270. package/test/datahike/test/components_test.cljc +92 -0
  271. package/test/datahike/test/config_test.cljc +158 -0
  272. package/test/datahike/test/core_test.cljc +105 -0
  273. package/test/datahike/test/datom_test.cljc +44 -0
  274. package/test/datahike/test/db_test.cljc +54 -0
  275. package/test/datahike/test/entity_spec_test.cljc +159 -0
  276. package/test/datahike/test/entity_test.cljc +103 -0
  277. package/test/datahike/test/explode_test.cljc +143 -0
  278. package/test/datahike/test/filter_test.cljc +75 -0
  279. package/test/datahike/test/gc_test.cljc +159 -0
  280. package/test/datahike/test/http/server_test.clj +192 -0
  281. package/test/datahike/test/http/writer_test.clj +86 -0
  282. package/test/datahike/test/ident_test.cljc +32 -0
  283. package/test/datahike/test/index_test.cljc +345 -0
  284. package/test/datahike/test/insert.cljc +125 -0
  285. package/test/datahike/test/java_bindings_test.clj +6 -0
  286. package/test/datahike/test/listen_test.cljc +41 -0
  287. package/test/datahike/test/lookup_refs_test.cljc +266 -0
  288. package/test/datahike/test/lru_test.cljc +27 -0
  289. package/test/datahike/test/migrate_test.clj +297 -0
  290. package/test/datahike/test/model/core.cljc +376 -0
  291. package/test/datahike/test/model/invariant.cljc +142 -0
  292. package/test/datahike/test/model/rng.cljc +82 -0
  293. package/test/datahike/test/model_test.clj +217 -0
  294. package/test/datahike/test/nodejs_test.cljs +262 -0
  295. package/test/datahike/test/online_gc_test.cljc +475 -0
  296. package/test/datahike/test/pod_test.clj +369 -0
  297. package/test/datahike/test/pull_api_test.cljc +474 -0
  298. package/test/datahike/test/purge_test.cljc +144 -0
  299. package/test/datahike/test/query_aggregates_test.cljc +101 -0
  300. package/test/datahike/test/query_find_specs_test.cljc +52 -0
  301. package/test/datahike/test/query_fns_test.cljc +523 -0
  302. package/test/datahike/test/query_interop_test.cljc +47 -0
  303. package/test/datahike/test/query_not_test.cljc +189 -0
  304. package/test/datahike/test/query_or_test.cljc +158 -0
  305. package/test/datahike/test/query_pull_test.cljc +147 -0
  306. package/test/datahike/test/query_rules_test.cljc +248 -0
  307. package/test/datahike/test/query_stats_test.cljc +218 -0
  308. package/test/datahike/test/query_test.cljc +984 -0
  309. package/test/datahike/test/schema_test.cljc +424 -0
  310. package/test/datahike/test/specification_test.cljc +30 -0
  311. package/test/datahike/test/store_test.cljc +78 -0
  312. package/test/datahike/test/stress_test.cljc +57 -0
  313. package/test/datahike/test/time_variance_test.cljc +518 -0
  314. package/test/datahike/test/tools_test.clj +134 -0
  315. package/test/datahike/test/transact_test.cljc +518 -0
  316. package/test/datahike/test/tuples_test.cljc +564 -0
  317. package/test/datahike/test/unstructured_test.cljc +291 -0
  318. package/test/datahike/test/upsert_impl_test.cljc +205 -0
  319. package/test/datahike/test/upsert_test.cljc +363 -0
  320. package/test/datahike/test/utils.cljc +110 -0
  321. package/test/datahike/test/validation_test.cljc +48 -0
  322. package/test/datahike/test/versioning_test.cljc +56 -0
  323. package/test/datahike/test.cljc +66 -0
  324. package/tests.edn +24 -0
@@ -0,0 +1,261 @@
1
+ # Versioning: Branch Databases, Not Just Code
2
+
3
+ **Status: Beta - API becoming stable. Please try it out and provide feedback at [contact@datahike.io](mailto:contact@datahike.io).**
4
+
5
+ Datahike's versioning system brings **git-like branching and merging** to your database. Just as git lets you experiment with code changes in branches before merging, Datahike lets you branch entire databases, evolve them independently, and selectively merge changes back.
6
+
7
+ ## Why Branch Databases?
8
+
9
+ **Structural sharing makes branching efficient.** Unlike copying entire databases, Datahike branches share unchanged data through persistent data structures—the same principle that makes git fast. Creating a branch is nearly instantaneous regardless of database size, because only new or modified index nodes are written.
10
+
11
+ When you create a branch, you get:
12
+ - **Isolated evolution**: Experiment without affecting production data
13
+ - **Selective merging**: Choose exactly which changes to apply
14
+ - **Zero data duplication**: Shared data exists only once in storage
15
+ - **Git-like semantics**: Branch, commit, merge with familiar concepts
16
+
17
+ ## When to Use Branches vs. Separate Databases
18
+
19
+ **Use branches when:**
20
+ - Testing schema migrations before applying to production
21
+ - Creating staging environments for data review and approval
22
+ - Running what-if analyses or experiments
23
+ - Collaborative editing where changes need review before merging
24
+ - You need to evolve a single logical dataset through different paths
25
+
26
+ **Use separate databases when:**
27
+ - Data is logically independent (different customers, different projects)
28
+ - You want complete isolation with no shared storage
29
+ - Combining data from multiple sources (data federation)
30
+ - Scaling reads across completely separate datasets
31
+
32
+ ## How Branches Work with Distributed Index Space
33
+
34
+ Branches are implemented as **different root pointers** in the same storage backend. Each branch name (`:db`, `:staging`, `:experimental`) points to a different commit, but all branches share the underlying persistent indices.
35
+
36
+ This means:
37
+ - Multiple readers can access different branches simultaneously via [DIS](distributed.md)
38
+ - No coordination needed between readers on different branches
39
+ - Branches can be accessed from any process with storage access
40
+ - Each branch maintains its own transaction history
41
+
42
+ ## Relationship to Time-Travel Queries
43
+
44
+ Datahike provides two complementary ways to work with history:
45
+
46
+ **Versioning (branches):**
47
+ - Creates durable, named snapshots that evolve independently
48
+ - Allows merging changes between snapshots
49
+ - Permanent until explicitly deleted
50
+ - Use for: experiments, staging, alternative versions
51
+
52
+ **Time-travel queries (as-of, history, since):**
53
+ - Views past states of a single branch
54
+ - Read-only access to transaction history
55
+ - Automatic if `:keep-history? true`
56
+ - Use for: auditing, debugging, temporal queries
57
+
58
+ Both rely on the same persistent data structures, but serve different purposes.
59
+
60
+ ## API Overview
61
+
62
+ The versioning API provides the following operations:
63
+
64
+ - `branch!` - Create a new branch from an existing branch
65
+ - `merge!` - Merge changes from one or more branches
66
+ - `force-branch!` - Create a branch from any in-memory DB value
67
+ - `delete-branch!` - Remove a branch
68
+ - `branch-history` - View commit history for a branch
69
+ - `commit-as-db` - Load a specific commit as a DB value
70
+ - `branch-as-db` - Load the current state of a branch
71
+ - `parent-commit-ids` - Get parent commits (for merge commits)
72
+
73
+ All operations work with the connection's configured storage backend—no special setup required.
74
+
75
+ ## Example Use Cases
76
+
77
+ ### Testing Schema Migrations
78
+
79
+ ```clojure
80
+ (require '[datahike.api :as d]
81
+ '[datahike.experimental.versioning :refer [branch! merge! delete-branch!]])
82
+
83
+ (let [cfg {:store {:backend :file :path "/var/db/production"}
84
+ :keep-history? true
85
+ :schema-flexibility :write}
86
+ conn (d/connect cfg)]
87
+
88
+ ;; Create migration test branch
89
+ (branch! conn :db :migration-test)
90
+ (let [test-conn (d/connect (assoc cfg :branch :migration-test))]
91
+
92
+ ;; Try new schema
93
+ (d/transact test-conn [{:db/ident :email
94
+ :db/valueType :db.type/string
95
+ :db/cardinality :db.cardinality/one
96
+ :db/unique :db.unique/identity}])
97
+
98
+ ;; Test with sample data
99
+ (d/transact test-conn [{:email "test@example.com"}])
100
+
101
+ ;; Verify migration worked, then merge to production
102
+ (when (verify-migration test-conn)
103
+ (merge! conn #{:migration-test} (migration-tx-data test-conn))
104
+ (delete-branch! conn :migration-test))))
105
+ ```
106
+
107
+ ### Staging Environment for Data Review
108
+
109
+ ```clojure
110
+ ;; Editorial workflow: draft changes in staging, review, then publish
111
+
112
+ (let [cfg {:store {:backend :s3 :bucket "my-content-db"}
113
+ :schema-flexibility :write}
114
+ prod-conn (d/connect cfg)]
115
+
116
+ ;; Editor creates staging branch
117
+ (branch! prod-conn :db :staging)
118
+ (let [staging-conn (d/connect (assoc cfg :branch :staging))]
119
+
120
+ ;; Make draft changes
121
+ (d/transact staging-conn [{:article/title "New Article"
122
+ :article/status :draft
123
+ :article/content "..."}])
124
+
125
+ ;; Reviewers can read staging branch without affecting production
126
+ ;; ... review process ...
127
+
128
+ ;; Approved? Merge to production
129
+ (let [approved-changes (extract-approved-changes staging-conn)]
130
+ (merge! prod-conn #{:staging} approved-changes))))
131
+ ```
132
+
133
+ ### Running Experiments
134
+
135
+ ```clojure
136
+ ;; Test different recommendation algorithms without affecting live data
137
+
138
+ (let [cfg {:store {:backend :file :path "/var/db/recommendations"}
139
+ :keep-history? false} ;; Don't need history for experiments
140
+ conn (d/connect cfg)]
141
+
142
+ ;; Create experimental branch
143
+ (branch! conn :db :experiment-new-algo)
144
+ (let [exp-conn (d/connect (assoc cfg :branch :experiment-new-algo))]
145
+
146
+ ;; Load experimental algorithm results
147
+ (d/transact exp-conn experimental-recommendations)
148
+
149
+ ;; Analyze results
150
+ (let [metrics (analyze-recommendations @exp-conn)]
151
+ (if (better-than-baseline? metrics)
152
+ ;; Good results - merge to production
153
+ (merge! conn #{:experiment-new-algo} experimental-recommendations)
154
+ ;; Poor results - just delete the branch
155
+ (delete-branch! conn :experiment-new-algo)))))
156
+ ```
157
+
158
+ ## Complete API Example
159
+
160
+ The following example demonstrates the full versioning API:
161
+
162
+ ~~~clojure
163
+ (require '[superv.async :refer [<?? S]]
164
+ '[datahike.api :as d]
165
+ '[datahike.experimental.versioning :refer [branch! branch-history delete-branch! force-branch! merge!
166
+ branch-as-db commit-as-db parent-commit-ids]])
167
+
168
+ (let [cfg {:store {:backend :file
169
+ :path "/tmp/dh-versioning-test"}
170
+ :keep-history? true
171
+ :schema-flexibility :write
172
+ :index :datahike.index/persistent-set}
173
+ conn (do
174
+ (d/delete-database cfg)
175
+ (d/create-database cfg)
176
+ (d/connect cfg))
177
+ schema [{:db/ident :age
178
+ :db/cardinality :db.cardinality/one
179
+ :db/valueType :db.type/long}]
180
+ _ (d/transact conn schema)
181
+ store (:store @conn)]
182
+ (branch! conn :db :foo) ;; new branch :foo, does not create new commit, just copies
183
+ (let [foo-conn (d/connect (assoc cfg :branch :foo))] ;; connect to it
184
+ (d/transact foo-conn [{:age 42}]) ;; transact some data
185
+ ;; extracted data from foo by query
186
+ ;; ...
187
+ ;; and decide to merge it into :db
188
+ (merge! conn #{:foo} [{:age 42}]))
189
+ (count (parent-commit-ids @conn)) ;; => 2, as :db got merged from :foo and :db
190
+ ;; check that the commit stored is the same db as conn
191
+ (= (commit-as-db store (commit-id @conn)) (branch-as-db store :db) @conn) ;; => true
192
+ (count (<?? S (branch-history conn))) ;; => 4 commits now on both branches
193
+ (force-branch! @conn :foo2 #{:foo}) ;; put whatever DB value you have created in memory
194
+ (delete-branch! conn :foo))
195
+ ~~~
196
+
197
+ Here we create a database as usual, but then we create a branch `:foo`, write to
198
+ it and then merge it back. A simple query to extract all data in transactable
199
+ form that is in a `branch1` db but not in `branch2` is
200
+
201
+ ~~~clojure
202
+ (d/q [:find ?db-add ?e ?a ?v ?t
203
+ :in $ $2 ?db-add
204
+ :where
205
+ [$ ?e ?a ?v ?t]
206
+ [(not= :db/txInstant ?a)]
207
+ (not [$2 ?e ?a ?v ?t])]
208
+ branch1 branch2 :db/add)
209
+ ~~~
210
+
211
+ but you might want to be more selective when creating the data for `merge!`.
212
+
213
+ ## Query Pattern: Extracting Branch Differences
214
+
215
+ When merging, you typically want to extract only specific changes from a branch. Here's a general pattern for finding differences:
216
+
217
+ ```clojure
218
+ ;; Find all datoms in branch1 that are not in branch2
219
+ (defn branch-diff [branch1 branch2]
220
+ (d/q '[:find ?e ?a ?v ?t
221
+ :in $ $2
222
+ :where
223
+ [$ ?e ?a ?v ?t]
224
+ [(not= :db/txInstant ?a)]
225
+ (not [$2 ?e ?a ?v ?t])]
226
+ branch1 branch2))
227
+
228
+ ;; Extract as transaction data
229
+ (defn diff-as-tx-data [branch1 branch2]
230
+ (mapv (fn [[e a v t]] [:db/add e a v])
231
+ (branch-diff branch1 branch2)))
232
+ ```
233
+
234
+ You can extend this pattern to:
235
+ - Filter by specific attributes (e.g., only user-facing data)
236
+ - Extract only entities matching certain criteria
237
+ - Apply transformations before merging
238
+ - Validate changes before committing to the target branch
239
+
240
+ ## Integration with Existing Connections
241
+
242
+ Branches integrate seamlessly with Datahike's connection model:
243
+
244
+ ```clojure
245
+ ;; Connect to specific branch by name
246
+ (def staging-conn (d/connect {:store {...} :branch :staging}))
247
+
248
+ ;; Default branch is :db
249
+ (def main-conn (d/connect {:store {...}})) ;; same as :branch :db
250
+
251
+ ;; Each connection operates independently
252
+ (d/transact staging-conn [...]) ;; doesn't affect main-conn
253
+ @staging-conn ;; DB snapshot of :staging branch
254
+ @main-conn ;; DB snapshot of :db branch
255
+ ```
256
+
257
+ Branches work with all storage backends (file, S3, JDBC, etc.) and participate in [Distributed Index Space](distributed.md)—multiple processes can read different branches concurrently.
258
+
259
+ ## Feedback and Support
260
+
261
+ We are actively developing the versioning API and would love to hear about your use cases. If you have ideas, feature requests, or encounter issues, please reach out to [contact@datahike.io](mailto:contact@datahike.io) or open an issue on [GitHub](https://github.com/replikativ/datahike/issues).
@@ -0,0 +1,19 @@
1
+ # Examples
2
+
3
+ This project shows some use cases for the store, the schema, and the temporal index implemented in Datahike.
4
+
5
+ ## Usage
6
+
7
+ Open the topic you want to explore in `/src/examples` in your favourite editor, then use a Clojure REPL to
8
+ eval the expressions from top to bottom.
9
+
10
+ For the [PostgreSQL](https://www.postgresql.org) example in `store.clj` you need to have
11
+ [docker](https://www.docker.com/) and
12
+ [docker-compose](https://docs.docker.com/compose/) installed.
13
+ Start it with:
14
+
15
+ ``` sh
16
+ docker-compose up -d
17
+ ```
18
+
19
+ If the selected ports collide with other ports, you may want to adjust `/docker-compose.yml` and restart the container.
@@ -0,0 +1,6 @@
1
+ {:paths ["src"]
2
+ :deps {org.clojure/clojure {:mvn/version "1.11.1"}
3
+ org.replikativ/datahike {:local/root "../.."}
4
+ ;; For released versions, use:
5
+ ;; org.replikativ/datahike {:mvn/version "0.7.1620"}
6
+ }}
@@ -0,0 +1,13 @@
1
+ version: '3.4'
2
+ services:
3
+ db:
4
+ image: postgres
5
+ restart: always
6
+ environment:
7
+ POSTGRES_USER: datahike
8
+ POSTGRES_PASSWORD: clojure
9
+ POSTGRES_DB: "pg-example"
10
+ ports:
11
+ - "5437:5432"
12
+ volumes:
13
+ - /tmp/datahike_example/data:/var/lib/postgresql/data
@@ -0,0 +1,60 @@
1
+ (ns examples.core
2
+ (:require [datahike.api :as d]))
3
+
4
+ (def cfg {:store {:backend :file
5
+ :path "/tmp/example"}
6
+ :keep-history? true
7
+ :schema-flexibility :read})
8
+
9
+ ;; create a database at this place; with this configuration we keep all
10
+ ;; historical data and use schema-on-read (:schema-flexibility :read)
11
+ (d/create-database cfg)
12
+
13
+ (def conn (d/connect cfg))
14
+
15
+ ;; the first transaction will be the schema we are using
16
+ ;; you may also add this within database creation by adding :initial-tx
17
+ ;; to the configuration
18
+ (d/transact conn [{:db/ident :name
19
+ :db/valueType :db.type/string
20
+ :db/cardinality :db.cardinality/one }
21
+ {:db/ident :age
22
+ :db/valueType :db.type/long
23
+ :db/cardinality :db.cardinality/one }])
24
+
25
+ ;; lets add some data and wait for the transaction
26
+ (d/transact conn [{:name "Alice", :age 20 }
27
+ {:name "Bob", :age 30 }
28
+ {:name "Charlie", :age 40 }
29
+ {:age 15 }])
30
+
31
+ ;; search the data
32
+ (d/q '[:find ?e ?n ?a
33
+ :where
34
+ [?e :name ?n]
35
+ [?e :age ?a]]
36
+ @conn)
37
+ ;; => #{[3 "Alice" 20] [4 "Bob" 30] [5 "Charlie" 40]}
38
+
39
+ ;; add new entity data using a hash map
40
+ (d/transact conn {:tx-data [{:db/id 3 :age 25}]})
41
+
42
+ ;; if you want to work with queries like in
43
+ ;; https://grishaev.me/en/datomic-query/,
44
+ ;; you may use a hashmap
45
+ (d/q {:query '{:find [?e ?n ?a ]
46
+ :where [[?e :name ?n]
47
+ [?e :age ?a]]}
48
+ :args [@conn]})
49
+ ;; => #{[5 "Charlie" 40] [4 "Bob" 30] [3 "Alice" 25]}
50
+
51
+ ;; query the history of the data
52
+ (d/q '[:find ?a
53
+ :where
54
+ [?e :name "Alice"]
55
+ [?e :age ?a]]
56
+ (d/history @conn))
57
+ ;; => #{[20] [25]}
58
+
59
+ ;; clean up the database if it is not needed any more
60
+ (d/delete-database cfg)
@@ -0,0 +1,155 @@
1
+ (ns examples.schema
2
+ (:require [datahike.api :as d]))
3
+
4
+ ;; The first example assumes you know your data model in advance,
5
+ ;; so we can use a schema-on-write approach in contrast to a schema-on-read
6
+ ;; approach. Have a look at the documentation in `/doc/schema.md` for more
7
+ ;; information on the different types of schema flexibility. After the first
8
+ ;; example we will have a short schema-on-read example.
9
+
10
+ ; first define data model
11
+ (def schema [{:db/ident :contributor/name
12
+ :db/valueType :db.type/string
13
+ :db/unique :db.unique/identity
14
+ :db/index true
15
+ :db/cardinality :db.cardinality/one
16
+ :db/doc "a contributor's name"}
17
+ {:db/ident :contributor/email
18
+ :db/valueType :db.type/string
19
+ :db/cardinality :db.cardinality/many
20
+ :db/doc "a contributor's email"}
21
+ {:db/ident :repository/name
22
+ :db/valueType :db.type/string
23
+ :db/unique :db.unique/identity
24
+ :db/index true
25
+ :db/cardinality :db.cardinality/one
26
+ :db/doc "a repository's name"}
27
+ {:db/ident :repository/contributors
28
+ :db/valueType :db.type/ref
29
+ :db/cardinality :db.cardinality/many
30
+ :db/doc "the repository's contributors"}
31
+ {:db/ident :repository/public
32
+ :db/valueType :db.type/boolean
33
+ :db/cardinality :db.cardinality/one
34
+ :db/doc "toggle whether the repository is public"}
35
+ {:db/ident :repository/tags
36
+ :db/valueType :db.type/ref
37
+ :db/cardinality :db.cardinality/many
38
+ :db/doc "the repository's tags"}
39
+ {:db/ident :language/clojure}
40
+ {:db/ident :language/rust}])
41
+
42
+ ;; define configuration
43
+ (def cfg {:store {:backend :mem
44
+ :id "schema-intro"}
45
+ :schema-flexibility :write})
46
+
47
+ ;; cleanup previous database
48
+ (d/delete-database cfg)
49
+
50
+ ;; create the in-memory database
51
+ (d/create-database cfg)
52
+
53
+ ;; connect to it
54
+ (def conn (d/connect cfg))
55
+
56
+ ;; add the schema
57
+
58
+ (d/transact conn schema)
59
+
60
+ ;; let's insert our first user
61
+ (d/transact conn [{:contributor/name "alice" :contributor/email "alice@exam.ple"}])
62
+
63
+ ;; let's find her with a query
64
+ (def find-name-email '[:find ?e ?n ?em :where [?e :contributor/name ?n] [?e :contributor/email ?em]])
65
+
66
+ (d/q find-name-email @conn)
67
+
68
+ ;; let's find her directly, as contributor/name is a unique, indexed identity
69
+ (d/pull @conn '[*] [:contributor/name "alice"])
70
+
71
+ ;; add a second email, as we have a many cardinality, we can have several ones as a user
72
+ (d/transact conn [{:db/id [:contributor/name "alice"] :contributor/email "alice@test.test"}])
73
+
74
+ ;; let's see both emails
75
+ (d/q find-name-email @conn)
76
+
77
+ ;; try to add something completely not defined in the schema
78
+ (d/transact conn [{:something "different"}])
79
+ ;; => Exception shows missing schema definition
80
+
81
+ ;; try to add wrong contributor values
82
+ (d/transact conn [{:contributor/email :alice}])
83
+ ;; => Exception shows what value is expected
84
+
85
+ ;; add another contributor by using the alternative transaction form that expects a hash map with a :tx-data attribute
86
+ (d/transact conn {:tx-data [{:contributor/name "bob" :contributor/email "bob@ac.me"}]})
87
+
88
+ (d/q find-name-email @conn)
89
+
90
+ (d/pull @conn '[*] [:contributor/name "bob"])
91
+
92
+ ;; change bob's name to bobby
93
+ (d/transact conn [{:db/id [:contributor/name "bob"] :contributor/name "bobby"}])
94
+
95
+ ;; check it
96
+ (d/q find-name-email @conn)
97
+
98
+ (d/pull @conn '[*] [:contributor/name "bobby"])
99
+
100
+ ;; "bob" no longer resolves as a unique identity in the index
101
+ (d/pull @conn '[*] [:contributor/name "bob"])
102
+ ;; will give an exception
103
+
104
+ ;; create a repository, with refs from uniques, and an ident as enum
105
+ (d/transact conn [{:repository/name "top secret"
106
+ :repository/public false
107
+ :repository/contributors [[:contributor/name "bobby"] [:contributor/name "alice"]]
108
+ :repository/tags :language/clojure}])
109
+
110
+ ;; let's search with pull inside the query
111
+ (def find-repositories '[:find (pull ?e [*]) :where [?e :repository/name ?n]])
112
+
113
+ ;; looks good
114
+ (d/q find-repositories @conn)
115
+
116
+ ;; let's go further and fetch the related contributor data as well
117
+ (def find-repositories-with-contributors '[:find (pull ?e [* {:repository/contributors [*] :repository/tags [*]}]) :where [?e :repository/name ?n]])
118
+
119
+ (d/q find-repositories-with-contributors @conn)
120
+
121
+ ;; the schema is part of the index, so we can query them too.
122
+ ;; Let's find all attribute names and their description.
123
+ (d/q '[:find ?a ?d :where [?e :db/ident ?a] [?e :db/doc ?d]] @conn)
124
+
125
+ ;; cleanup the database
126
+ (d/delete-database cfg)
127
+
128
+ ;; Schema On Read
129
+
130
+ ;; let's create another database that can hold any arbitrary data
131
+
132
+ (def cfg {:store {:backend :mem
133
+ :id "schemaless"}
134
+ :schema-flexibility :read})
135
+
136
+ (d/create-database cfg)
137
+
138
+ (def conn (d/connect cfg))
139
+
140
+ ;; now we can go wild and transact anything
141
+ (d/transact conn [{:any "thing"}])
142
+
143
+ ;; use simple query on this data
144
+ (d/q '[:find ?v :where [_ :any ?v]] @conn)
145
+
146
+ ;; be aware: although there is no schema, you should tell the database if some
147
+ ;; attributes can have specific cardinality or indices.
148
+ ;; You may add that as schema transactions like before
149
+ (d/transact conn [{:db/ident :any :db/cardinality :db.cardinality/many}])
150
+
151
+ ;; let's add more data to the first any entity
152
+ (def any-eid (d/q '[:find ?e . :where [?e :any "thing"]] @conn))
153
+ (d/transact conn [{:db/id any-eid :any "thing else"}])
154
+
155
+ (d/q '[:find ?v :where [_ :any ?v]] @conn)
@@ -0,0 +1,60 @@
1
+ (ns examples.store
2
+ (:require [datahike.api :as d]))
3
+
4
+ (def schema [{:db/ident :name
5
+ :db/valueType :db.type/string
6
+ :db/cardinality :db.cardinality/one}])
7
+
8
+ (def query '[:find ?n :where [?e :name ?n]])
9
+
10
+ ;; let's cleanup, create, and connect all in one
11
+ (defn cleanup-and-create-conn [cfg]
12
+ (d/delete-database cfg)
13
+ (d/create-database cfg)
14
+ (let [conn (d/connect cfg)]
15
+ (d/transact conn schema)
16
+ conn))
17
+
18
+ (defn transact-and-find [conn name]
19
+ (d/transact conn [{:name name}])
20
+ (d/q query @conn))
21
+
22
+ ;; first let's have a look at the memory store which uses an atom internally to store data
23
+ ;; memory backend requires a UUID identifier for distributed tracking
24
+ (def mem-cfg {:store {:backend :memory :id (java.util.UUID/randomUUID)}})
25
+
26
+ ;; create it
27
+ (def mem-conn (cleanup-and-create-conn mem-cfg))
28
+
29
+ ;; add and find data
30
+ (transact-and-find mem-conn "Alice");; => #{["Alice"]}
31
+
32
+ ;; next we try out file based store which can be used as the simplest form of persistence
33
+ ;; the datoms are serialized at `/tmp/file_example`
34
+ (def file-cfg {:store {:backend :file :path "/tmp/file_example"}})
35
+
36
+ (def file-conn (cleanup-and-create-conn file-cfg))
37
+
38
+ (transact-and-find file-conn "Bob");; => #{["Bob"]}
39
+
40
+ ;; External backends
41
+ ;; Datahike supports additional backends via plugins:
42
+ ;; - PostgreSQL (via datahike-jdbc)
43
+ ;; - S3 (via datahike-s3)
44
+ ;; - Redis, LevelDB, etc.
45
+ ;; See https://github.com/replikativ/datahike for available backends
46
+
47
+ ;; We can query across multiple databases
48
+ (d/q '[:find ?mem ?file
49
+ :in $mem-db $file-db
50
+ :where
51
+ [$mem-db ?e0 :name ?mem]
52
+ [$file-db ?e1 :name ?file]]
53
+ (d/db mem-conn)
54
+ (d/db file-conn)) ;; => #{["Alice" "Bob"]}
55
+
56
+ ;; cleanup
57
+ (do
58
+ (d/delete-database mem-cfg)
59
+ (d/delete-database file-cfg))
60
+