lamindb 0.32.0__tar.gz → 0.33.0__tar.gz

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (165)
  1. {lamindb-0.32.0 → lamindb-0.33.0}/.github/workflows/build.yml +2 -1
  2. lamindb-0.33.0/.gitmodules +3 -0
  3. lamindb-0.33.0/PKG-INFO +236 -0
  4. lamindb-0.33.0/README.md +198 -0
  5. {lamindb-0.32.0 → lamindb-0.33.0}/docs/changelog.md +9 -2
  6. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/data-validation.ipynb +2 -2
  7. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/ingest-acid.ipynb +1 -1
  8. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/ingest-same-file-twice.ipynb +1 -1
  9. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/ingest.ipynb +5 -21
  10. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/lazy-loading.ipynb +2 -2
  11. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/nb.ipynb +31 -7
  12. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/session.ipynb +23 -4
  13. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/stream.ipynb +11 -23
  14. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/track-runin.ipynb +17 -26
  15. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/01-setup.ipynb +27 -11
  16. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/03-files.ipynb +136 -37
  17. lamindb-0.33.0/docs/guide/04-memory.ipynb +263 -0
  18. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/05-existing.ipynb +115 -47
  19. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/06-folder.ipynb +114 -21
  20. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/07-select.ipynb +4 -4
  21. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/08-add-delete.ipynb +1 -1
  22. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/08-run.ipynb +91 -123
  23. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/09-schema.ipynb +2 -2
  24. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/10-knowledge.ipynb +17 -8
  25. lamindb-0.32.0/docs/guide/11-link-features.ipynb → lamindb-0.33.0/docs/guide/11-scrna.ipynb +128 -76
  26. lamindb-0.33.0/docs/guide/12-flow.ipynb +225 -0
  27. lamindb-0.32.0/docs/guide/12-link-samples.ipynb → lamindb-0.33.0/docs/guide/14-link-samples.ipynb +1 -1
  28. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/15-query-book.ipynb +11 -66
  29. lamindb-0.33.0/docs/guide/features.md +20 -0
  30. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/index.md +1 -1
  31. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/__init__.py +20 -13
  32. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/_check_versions.py +4 -4
  33. lamindb-0.33.0/lamindb/_context.py +196 -0
  34. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/_delete.py +13 -7
  35. lamindb-0.33.0/lamindb/_load.py +36 -0
  36. lamindb-0.33.0/lamindb/_nb.py +109 -0
  37. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/_record.py +55 -21
  38. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/db/_add.py +22 -7
  39. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/schema/__init__.py +0 -14
  40. lamindb-0.33.0/lnschema-core/.github/workflows/build.yml +94 -0
  41. lamindb-0.33.0/lnschema-core/.github/workflows/latest-changes.jinja2 +2 -0
  42. lamindb-0.33.0/lnschema-core/.github/workflows/latest-changes.yml +25 -0
  43. lamindb-0.33.0/lnschema-core/.gitignore +116 -0
  44. lamindb-0.33.0/lnschema-core/.pre-commit-config.yaml +65 -0
  45. lamindb-0.33.0/lnschema-core/LICENSE +201 -0
  46. lamindb-0.33.0/lnschema-core/README.md +7 -0
  47. lamindb-0.33.0/lnschema-core/docs/changelog.md +122 -0
  48. lamindb-0.33.0/lnschema-core/docs/guide/0-schema.ipynb +64 -0
  49. lamindb-0.32.0/docs/guide/04-memory.ipynb → lamindb-0.33.0/lnschema-core/docs/guide/1-basic-orms.ipynb +38 -29
  50. lamindb-0.33.0/lnschema-core/docs/guide/2-data-validation.ipynb +201 -0
  51. lamindb-0.33.0/lnschema-core/docs/guide/index.md +9 -0
  52. lamindb-0.33.0/lnschema-core/docs/index.md +11 -0
  53. lamindb-0.33.0/lnschema-core/lamin-project.yaml +5 -0
  54. lamindb-0.33.0/lnschema-core/lnschema_core/__init__.py +53 -0
  55. lamindb-0.33.0/lnschema-core/lnschema_core/_core.py +612 -0
  56. lamindb-0.33.0/lnschema-core/lnschema_core/_link.py +52 -0
  57. lamindb-0.33.0/lnschema-core/lnschema_core/_timestamps.py +5 -0
  58. lamindb-0.33.0/lnschema-core/lnschema_core/_users.py +10 -0
  59. lamindb-0.33.0/lnschema-core/lnschema_core/dev/__init__.py +23 -0
  60. lamindb-0.33.0/lnschema-core/lnschema_core/dev/_id.py +118 -0
  61. lamindb-0.33.0/lnschema-core/lnschema_core/dev/_storage.py +34 -0
  62. lamindb-0.33.0/lnschema-core/lnschema_core/dev/_type.py +13 -0
  63. lamindb-0.33.0/lnschema-core/lnschema_core/dev/_versions.py +35 -0
  64. lamindb-0.33.0/lnschema-core/lnschema_core/dev/id.py +48 -0
  65. lamindb-0.33.0/lnschema-core/lnschema_core/dev/sqlmodel.py +333 -0
  66. lamindb-0.33.0/lnschema-core/lnschema_core/dev/type.py +8 -0
  67. lamindb-0.33.0/lnschema-core/lnschema_core/link.py +12 -0
  68. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-07-21-0560ee3d73dc-jupynb.py +92 -0
  69. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-08-08-8c78543d1c5b-v0_3_0.py +162 -0
  70. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-08-19-1c531ea346cf-storage.py +62 -0
  71. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-08-22-01fcb82dafd4-v0_4_0.py +114 -0
  72. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-08-26-3badf20f18c8-v0_5_0.py +73 -0
  73. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-08-29-d1b3e5da6391-v0_5_1.py +29 -0
  74. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-09-15-5fa54c55c3bf-v0_6_0.py +28 -0
  75. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-09-18-049d7dfc80a8-v0_7_1.py +38 -0
  76. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-09-18-3b60b87450c0-v0_7_0.py +144 -0
  77. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-09-24-1f29517759b7-v0_7_3.py +91 -0
  78. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-09-25-7e8f7b30792e-v0_8_0.py +27 -0
  79. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-09-26-1190648443cb-v0_8_1.py +25 -0
  80. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-09-30-439c4ee0a22a-v0_9_0.py +36 -0
  81. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-10-07-0c819d33ca9b-v0_10_0.py +43 -0
  82. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-10-10-3d244a8d3148-v0_11_0.py +26 -0
  83. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-10-11-2ddcb037e3ea-v0_12_0.py +26 -0
  84. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-10-19-cf5913791674-v0_14_0.py +100 -0
  85. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-10-31-98da12fc80a8-v0_15_0.py +50 -0
  86. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-11-10-4ee426b656bb-v0_16_0.py +158 -0
  87. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-11-11-66bfd6cf2e2d-v0_17_0.py +152 -0
  88. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-11-28-4b4005b7841c-v0_21_1.py +117 -0
  89. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-12-07-9d94f3b9566d-v0_21_3.py +52 -0
  90. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2022-12-07-db1df7b2aaad-v0_22_0.py +66 -0
  91. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2023-01-09-f6b6b85cdffc-v0_24_0.py +38 -0
  92. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2023-02-02-9d283a1685a5-v0_25_6.py +48 -0
  93. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2023-02-07-8bf788467d0a-v0_25_9.py +120 -0
  94. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2023-02-10-ff3b5b3ec913-v0_26_1.py +30 -0
  95. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2023-02-14-8280855a5064-v0_26_2.py +37 -0
  96. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2023-02-21-1dafcf0b22aa-v0_28_0.py +31 -0
  97. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2023-02-22-6952574e2d49-v0_28_1.py +39 -0
  98. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2023-02-23-24e55331f27c-v0_28_2.py +52 -0
  99. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2023-02-23-3dd9f8d41861-v0_28_3.py +23 -0
  100. lamindb-0.33.0/lnschema-core/lnschema_core/migrations/versions/2023-02-24-873683a29806-v0_28_7.py +24 -0
  101. lamindb-0.33.0/lnschema-core/noxfile.py +27 -0
  102. lamindb-0.33.0/lnschema-core/pyproject.toml +51 -0
  103. lamindb-0.33.0/lnschema-core/tests/test_base.py +14 -0
  104. lamindb-0.33.0/lnschema-core/tests/test_migrations.py +35 -0
  105. lamindb-0.33.0/lnschema-core/tests/test_notebooks.py +16 -0
  106. {lamindb-0.32.0 → lamindb-0.33.0}/noxfile.py +7 -0
  107. {lamindb-0.32.0 → lamindb-0.33.0}/pyproject.toml +2 -2
  108. {lamindb-0.32.0 → lamindb-0.33.0}/tests/test_db.py +1 -1
  109. lamindb-0.32.0/PKG-INFO +0 -178
  110. lamindb-0.32.0/README.md +0 -140
  111. lamindb-0.32.0/lamindb/_load.py +0 -46
  112. lamindb-0.32.0/lamindb/_nb.py +0 -174
  113. {lamindb-0.32.0 → lamindb-0.33.0}/.github/workflows/latest-changes.jinja2 +0 -0
  114. {lamindb-0.32.0 → lamindb-0.33.0}/.github/workflows/latest-changes.yml +0 -0
  115. {lamindb-0.32.0 → lamindb-0.33.0}/.gitignore +0 -0
  116. {lamindb-0.32.0 → lamindb-0.33.0}/.pre-commit-config.yaml +0 -0
  117. {lamindb-0.32.0 → lamindb-0.33.0}/LICENSE +0 -0
  118. {lamindb-0.32.0 → lamindb-0.33.0}/docs/api.md +0 -0
  119. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/index.md +0 -0
  120. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/select.ipynb +0 -0
  121. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/setup.ipynb +0 -0
  122. {lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/storage.ipynb +0 -0
  123. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/dobject.md +0 -0
  124. {lamindb-0.32.0 → lamindb-0.33.0}/docs/guide/track.md +0 -0
  125. {lamindb-0.32.0 → lamindb-0.33.0}/docs/index.md +0 -0
  126. {lamindb-0.32.0 → lamindb-0.33.0}/lamin-project.yaml +0 -0
  127. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/_folder.py +0 -0
  128. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/_logger.py +0 -0
  129. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/_settings.py +0 -0
  130. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/_subset.py +0 -0
  131. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/_view.py +0 -0
  132. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/__init__.py +0 -0
  133. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/_core.py +0 -0
  134. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/_docs.py +0 -0
  135. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/datasets/__init__.py +0 -0
  136. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/datasets/_core.py +0 -0
  137. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/db/__init__.py +0 -0
  138. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/db/_core.py +0 -0
  139. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/db/_select.py +0 -0
  140. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/db/_session.py +0 -0
  141. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/db/_track_usage.py +0 -0
  142. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/file/__init__.py +0 -0
  143. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/file/_file.py +0 -0
  144. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/file/_filesystem.py +0 -0
  145. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/file/_h5ad.py +0 -0
  146. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/file/_images.py +0 -0
  147. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/file/_zarr.py +0 -0
  148. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/object/__init__.py +0 -0
  149. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/object/_anndata.py +0 -0
  150. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/object/_anndata_sizes.py +0 -0
  151. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/object/_core.py +0 -0
  152. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/object/_lazy_field.py +0 -0
  153. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/dev/object/_subset_anndata.py +0 -0
  154. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/schema/_core.py +0 -0
  155. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/schema/_table.py +0 -0
  156. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/schema/dev/__init__.py +0 -0
  157. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/schema/dev/id.py +0 -0
  158. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/schema/dev/sqlmodel.py +0 -0
  159. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/schema/dev/type.py +0 -0
  160. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/setup/__init__.py +0 -0
  161. {lamindb-0.32.0 → lamindb-0.33.0}/lamindb/setup/dev.py +0 -0
  162. {lamindb-0.32.0 → lamindb-0.33.0}/tests/conftest.py +0 -0
  163. {lamindb-0.32.0 → lamindb-0.33.0}/tests/test_conversion.py +0 -0
  164. {lamindb-0.32.0 → lamindb-0.33.0}/tests/test_file_hashing.py +0 -0
  165. {lamindb-0.32.0 → lamindb-0.33.0}/tests/test_notebooks.py +0 -0

{lamindb-0.32.0 → lamindb-0.33.0}/.github/workflows/build.yml +2 -1

@@ -21,6 +21,7 @@ jobs:
  - name: Checkout main
  uses: actions/checkout@v3
  with:
+ submodules: recursive
  fetch-depth: 0
  - name: Checkout lndocs
  uses: actions/checkout@v3
@@ -46,7 +47,7 @@ jobs:
  run: |
  python -m pip install -U pip
  pip install -U laminci
- pip install lamindb
+ pip install -U lamindb
  sudo apt-get -y install graphviz
  sudo apt-get install libpq-dev
  - name: Lint

lamindb-0.33.0/.gitmodules +3 -0

@@ -0,0 +1,3 @@
+ [submodule "lnschema-core"]
+ path = lnschema-core
+ url = https://github.com/laminlabs/lnschema-core.git

lamindb-0.33.0/PKG-INFO +236 -0

@@ -0,0 +1,236 @@
+ Metadata-Version: 2.1
+ Name: lamindb
+ Version: 0.33.0
+ Summary: LaminDB: Manage R&D data & analyses.
+ Author-email: Lamin Labs <laminlabs@gmail.com>
+ Description-Content-Type: text/markdown
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Requires-Dist: lndb>=0.37.4
+ Requires-Dist: lnschema_core==0.29.5
+ Requires-Dist: lnschema_wetlab==0.13.4
+ Requires-Dist: lnschema_bionty==0.8.0
+ Requires-Dist: nbproject>=0.8.3
+ Requires-Dist: readfcs>=1.1.0
+ Requires-Dist: anndata>=0.8.0
+ Requires-Dist: hjson
+ Requires-Dist: sqlmodel>=0.0.8
+ Requires-Dist: typeguard
+ Requires-Dist: tabulate
+ Requires-Dist: erdiagram
+ Requires-Dist: zarr
+ Requires-Dist: pre-commit ; extra == "dev"
+ Requires-Dist: nox ; extra == "dev"
+ Requires-Dist: laminci>=0.2.5 ; extra == "dev"
+ Requires-Dist: pytest>=6.0 ; extra == "test"
+ Requires-Dist: coverage ; extra == "test"
+ Requires-Dist: pytest-cov ; extra == "test"
+ Requires-Dist: scanpy ; extra == "test"
+ Requires-Dist: scikit-learn>=1.1.1 ; extra == "test"
+ Requires-Dist: nbproject_test>=0.3.0 ; extra == "test"
+ Requires-Dist: psycopg2-binary ; extra == "test"
+ Requires-Dist: matplotlib<3.7 ; extra == "test"
+ Project-URL: Home, https://github.com/laminlabs/lamindb
+ Provides-Extra: dev
+ Provides-Extra: test
+
+ [![Stars](https://img.shields.io/github/stars/laminlabs/lamindb?logo=GitHub&color=yellow)](https://github.com/laminlabs/lamindb)
+ [![codecov](https://codecov.io/gh/laminlabs/lamindb/branch/main/graph/badge.svg?token=VKMRJ7OWR3)](https://codecov.io/gh/laminlabs/lamindb)
+ [![pypi](https://img.shields.io/pypi/v/lamindb?color=blue&label=pypi%20package)](https://pypi.org/project/lamindb)
+
+ # LaminDB: Manage R&D data & analyses
+
+ _Curate, store, track, query, integrate, and learn from biological data._
+
+ LaminDB is an open-source data lake for R&D in biology. It manages indexed **object storage** (local directories, S3, GCP) with a mapped **SQL database** (SQLite, Postgres, and soon, BigQuery).
+
+ One cool thing is that you can readily create distributed _LaminDB instances_ at any scale. Get started on your laptop, deploy in the cloud, or work with a mesh of instances for different teams and purposes.
+
+ ```{warning}
+
+ Public beta: Currently only recommended for collaborators as we still make breaking changes.
+
+ ```
+
+ ## Installation
+
+ LaminDB is a python package available for Python versions 3.8+.
+
+ ```shell
+ pip install lamindb
+ ```
+
+ ## Import
+
+ In your python script, import LaminDB as:
+
+ ```python
+ import lamindb as ln
+ ```
+
+ ## Quick setup
+
+ Quick setup on the command line:
+
+ - Sign up via `lamin signup <email>`
+ - Log in via `lamin login <handle>`
+ - Set up an instance via `lamin init --storage <storage> --schema <schema_modules>`
+
+ :::{dropdown} Example code
+
+ ```shell
+ lamin signup testuser1@lamin.ai
+ lamin login testuser1
+ lamin init --storage ./mydata --schema bionty,wetlab
+ ```
+
+ :::
+
+ See {doc}`/guide/setup` for more.
+
+ ## Track & query data
+
+ ### Track data sources, data, and metadata
+
+ ::::{tab-set}
+ :::{tab-item} Within an interactive notebook
+
+ ```{code-block} python
+ import lamindb as ln
+
+ ln.Run() # data source (a run record) is created
+ #> ℹ️ Instance: testuser2/mydata
+ #> ℹ️ User: testuser2
+ #> ℹ️ Loaded run:
+ #> Run(id='L1oBMKW60ndt5YtjRqav', notebook_id='sePTpDsGJRq3', notebook_v='0', created_by='bKeW4T6E', created_at=datetime.datetime(2023, 3, 14, 21, 49, 36))
+
+ df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
+
+ # create a data object with SQL metadata record including hash
+ # link run record
+ dobject = ln.DObject(df, name="My dataframe")
+ #> DObject(id='dZvGD7YUKCKG4X4aLd5K', name='My dataframe', suffix='.parquet', size=2240, hash='R2_kKlH1nBGesMdyulMYkA', source_id='L1oBMKW60ndt5YtjRqav', storage_id='wor0ul6c')
+
+ # upload serialized version to the configured storage
+ # commit a DObject record to the SQL database
+ ln.add(dobject)
+ #> DObject(id='dZvGD7YUKCKG4X4aLd5K', name='My dataframe', suffix='.parquet', size=2240, hash='R2_kKlH1nBGesMdyulMYkA', source_id='L1oBMKW60ndt5YtjRqav', storage_id='wor0ul6c', created_at=datetime.datetime(2023, 3, 14, 21, 49, 46))
+ ```
+
+ :::
+ :::{tab-item} Within a regular pipeline
+
+ ```{code-block} python
+ # create (or query) a pipeline record
+ pipeline = lns.Pipeline(name="My pipeline")
+ #> Pipeline(id='fhn5Zydf', v='1', name='My pipeline', created_by='bKeW4T6E')
+
+ # create a run from the above pipeline as the data source
+ run = ln.Run(pipeline=pipeline)
+ #> Run(id='2aaKWH8dwBE6hnj3n9K9', pipeline_id='fhn5Zydf', pipeline_v='1', created_by='bKeW4T6E')
+
+ # access pipeline from run via
+ print(run.pipeline)
+ #> Pipeline(id='fhn5Zydf', v='1', name='My pipeline', created_by='bKeW4T6E')
+
+ df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
+
+ # create a data object with SQL metadata record including hash and link run record
+ dobject = ln.DObject(df, name="My dataframe", source=run)
+ #> DObject(id='dZvGD7YUKCKG4X4aLd5K', name='My dataframe', suffix='.parquet', size=2240, hash='R2_kKlH1nBGesMdyulMYkA', source_id='L1oBMKW60ndt5YtjRqav', storage_id='wor0ul6c')
+
+ # Tip: If you work with a single thread, you can pass `global_context=True` to ln.Run(), allowing you to omit source=run
+
+ # upload serialized version to the configured storage
+ # commit a DObject record to the SQL database
+ ln.add(dobject)
+ #> DObject(id='dZvGD7YUKCKG4X4aLd5K', name='My dataframe', suffix='.parquet', size=2240, hash='R2_kKlH1nBGesMdyulMYkA', source_id='L1oBMKW60ndt5YtjRqav', storage_id='wor0ul6c', created_at=datetime.datetime(2023, 3, 14, 21, 49, 46))
+ ```
+
+ :::
+ ::::
+
+ ### Query & load data
+
+ ```python
+ dobject = ln.select(ln.DObject, name="My dataframe").one()
+ #> [DObject(id='dZvGD7YUKCKG4X4aLd5K', name='My dataframe', suffix='.parquet', size=2240, hash='R2_kKlH1nBGesMdyulMYkA', source_id='L1oBMKW60ndt5YtjRqav', storage_id='wor0ul6c', created_at=datetime.datetime(2023, 3, 14, 21, 49, 46))]
+ df = dobject.load()
+ #> a b
+ #> 0 1 3
+ #> 1 2 4
+ ```
+
+ Get the data ingested by the latest run:
+
+ ```python
+ run = ln.select(ln.Run).order_by(ln.Run.created_at.desc()).first()
+ #> Run(id='L1oBMKW60ndt5YtjRqav', notebook_id='sePTpDsGJRq3', notebook_v='0', created_by='bKeW4T6E', created_at=datetime.datetime(2023, 3, 14, 21, 49, 36))
+ dobject = ln.select(ln.DObject).where(ln.DObject.source == run).all()
+ #> [DObject(id='dZvGD7YUKCKG4X4aLd5K', name='My dataframe', suffix='.parquet', size=2240, hash='R2_kKlH1nBGesMdyulMYkA', source_id='L1oBMKW60ndt5YtjRqav', storage_id='wor0ul6c', created_at=datetime.datetime(2023, 3, 14, 21, 49, 46))]
+ ```
+
+ <br>
+
+ See {doc}`/guide/track` for more.
+
+ ## Track biological metadata
+
+ ### Track biological features
+
+ ```python
+ import bionty as bt # Lamin's manager for biological knowledge
+ import lamindb as ln
+
+ ln.Run() # assume we're in a notebook and don't need to pass pipeline_name
+
+ # a sample single cell RNA-seq dataset
+ adata = ln.dev.datasets.anndata_mouse_sc_lymph_node()
+
+ # Create a reference
+ # - ensembl id as the standardized id
+ # - mouse as the species
+ reference = bt.Gene(species="mouse")
+
+ # parse gene identifiers from data and map on reference
+ features = ln.Features(adata, reference)
+ #> 🔶 id column not found, using index as features.
+ #> ✅ 0 terms (0.0%) are mapped.
+ #> 🔶 10000 terms (100.0%) are not mapped.
+ # The result is a hashed feature set record:
+ print(features)
+ #> Features(id='2Mv3JtH-ScBVYHilbLaQ', type='gene', created_by='bKeW4T6E')
+ # genes records can be accessed via:
+ print(features.genes[:3])
+ #> [Gene(id='ENSMUSG00000020592', species_id='NCBI_10090'),
+ #> Gene(id='ENSMUSG00000034931', species_id='NCBI_10090'),
+ #> Gene(id='ENSMUSG00000071005', species_id='NCBI_10090')]
+
+ # track data with features
+ dobject = ln.DObject(adata, name="Mouse Lymph Node scRNA-seq", features=features)
+
+ # access linked gene references
+ print(dobject.features.genes[:3])
+ #> [Gene(id='ENSMUSG00000020592', species_id='NCBI_10090'),
+ #> Gene(id='ENSMUSG00000034931', species_id='NCBI_10090'),
+ #> Gene(id='ENSMUSG00000071005', species_id='NCBI_10090')]
+
+ # upload serialized data to configured storage
+ # commit a DObject record to the SQL database
+ # commit all linked features to the SQL database
+ ln.add(dobject)
+ ```
+
+ <br>
+
+ See {doc}`/guide/features` for more.
+
+ ```{tip}
+ - Each page in this guide is a Jupyter Notebook, which you can download [here](https://github.com/laminlabs/lamindb/tree/main/docs/guide).
+ - You can run these notebooks in hosted versions of JupyterLab, e.g., [Saturn Cloud](https://github.com/laminlabs/run-lamin-on-saturn), Google Vertex AI, and others.
+ - We recommend using [JupyterLab](https://jupyterlab.readthedocs.io/) for best notebook tracking experience.
+ ```
+
+ 📬 [Reach out](https://lamin.ai/contact) to report issues, learn about data modules that connect your assays, pipelines & workflows within our data platform enterprise plan.
+
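
The metadata above pins the schema packages to exact versions (`lnschema_core==0.29.5`, `lnschema_wetlab==0.13.4`, `lnschema_bionty==0.8.0`) while leaving the remaining dependencies as lower bounds. A minimal sketch for checking what actually resolved after upgrading — the distribution names are copied verbatim from the `Requires-Dist` lines above, and the check itself is not part of lamindb:

```python
# Sketch only: compare installed schema modules against the exact pins in PKG-INFO.
from importlib.metadata import PackageNotFoundError, version

pins = {
    "lnschema_core": "0.29.5",
    "lnschema_wetlab": "0.13.4",
    "lnschema_bionty": "0.8.0",
}

for name, pinned in pins.items():
    try:
        installed = version(name)  # raises if the distribution is missing
    except PackageNotFoundError:
        print(f"{name}: not installed")
        continue
    status = "ok" if installed == pinned else f"differs from pin {pinned}"
    print(f"{name}: {installed} ({status})")
```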

lamindb-0.33.0/README.md +198 -0

@@ -0,0 +1,198 @@
+ [![Stars](https://img.shields.io/github/stars/laminlabs/lamindb?logo=GitHub&color=yellow)](https://github.com/laminlabs/lamindb)
+ [![codecov](https://codecov.io/gh/laminlabs/lamindb/branch/main/graph/badge.svg?token=VKMRJ7OWR3)](https://codecov.io/gh/laminlabs/lamindb)
+ [![pypi](https://img.shields.io/pypi/v/lamindb?color=blue&label=pypi%20package)](https://pypi.org/project/lamindb)
+
+ # LaminDB: Manage R&D data & analyses
+
+ _Curate, store, track, query, integrate, and learn from biological data._
+
+ LaminDB is an open-source data lake for R&D in biology. It manages indexed **object storage** (local directories, S3, GCP) with a mapped **SQL database** (SQLite, Postgres, and soon, BigQuery).
+
+ One cool thing is that you can readily create distributed _LaminDB instances_ at any scale. Get started on your laptop, deploy in the cloud, or work with a mesh of instances for different teams and purposes.
+
+ ```{warning}
+
+ Public beta: Currently only recommended for collaborators as we still make breaking changes.
+
+ ```
+
+ ## Installation
+
+ LaminDB is a python package available for Python versions 3.8+.
+
+ ```shell
+ pip install lamindb
+ ```
+
+ ## Import
+
+ In your python script, import LaminDB as:
+
+ ```python
+ import lamindb as ln
+ ```
+
+ ## Quick setup
+
+ Quick setup on the command line:
+
+ - Sign up via `lamin signup <email>`
+ - Log in via `lamin login <handle>`
+ - Set up an instance via `lamin init --storage <storage> --schema <schema_modules>`
+
+ :::{dropdown} Example code
+
+ ```shell
+ lamin signup testuser1@lamin.ai
+ lamin login testuser1
+ lamin init --storage ./mydata --schema bionty,wetlab
+ ```
+
+ :::
+
+ See {doc}`/guide/setup` for more.
+
+ ## Track & query data
+
+ ### Track data sources, data, and metadata
+
+ ::::{tab-set}
+ :::{tab-item} Within an interactive notebook
+
+ ```{code-block} python
+ import lamindb as ln
+
+ ln.Run() # data source (a run record) is created
+ #> ℹ️ Instance: testuser2/mydata
+ #> ℹ️ User: testuser2
+ #> ℹ️ Loaded run:
+ #> Run(id='L1oBMKW60ndt5YtjRqav', notebook_id='sePTpDsGJRq3', notebook_v='0', created_by='bKeW4T6E', created_at=datetime.datetime(2023, 3, 14, 21, 49, 36))
+
+ df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
+
+ # create a data object with SQL metadata record including hash
+ # link run record
+ dobject = ln.DObject(df, name="My dataframe")
+ #> DObject(id='dZvGD7YUKCKG4X4aLd5K', name='My dataframe', suffix='.parquet', size=2240, hash='R2_kKlH1nBGesMdyulMYkA', source_id='L1oBMKW60ndt5YtjRqav', storage_id='wor0ul6c')
+
+ # upload serialized version to the configured storage
+ # commit a DObject record to the SQL database
+ ln.add(dobject)
+ #> DObject(id='dZvGD7YUKCKG4X4aLd5K', name='My dataframe', suffix='.parquet', size=2240, hash='R2_kKlH1nBGesMdyulMYkA', source_id='L1oBMKW60ndt5YtjRqav', storage_id='wor0ul6c', created_at=datetime.datetime(2023, 3, 14, 21, 49, 46))
+ ```
+
+ :::
+ :::{tab-item} Within a regular pipeline
+
+ ```{code-block} python
+ # create (or query) a pipeline record
+ pipeline = lns.Pipeline(name="My pipeline")
+ #> Pipeline(id='fhn5Zydf', v='1', name='My pipeline', created_by='bKeW4T6E')
+
+ # create a run from the above pipeline as the data source
+ run = ln.Run(pipeline=pipeline)
+ #> Run(id='2aaKWH8dwBE6hnj3n9K9', pipeline_id='fhn5Zydf', pipeline_v='1', created_by='bKeW4T6E')
+
+ # access pipeline from run via
+ print(run.pipeline)
+ #> Pipeline(id='fhn5Zydf', v='1', name='My pipeline', created_by='bKeW4T6E')
+
+ df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
+
+ # create a data object with SQL metadata record including hash and link run record
+ dobject = ln.DObject(df, name="My dataframe", source=run)
+ #> DObject(id='dZvGD7YUKCKG4X4aLd5K', name='My dataframe', suffix='.parquet', size=2240, hash='R2_kKlH1nBGesMdyulMYkA', source_id='L1oBMKW60ndt5YtjRqav', storage_id='wor0ul6c')
+
+ # Tip: If you work with a single thread, you can pass `global_context=True` to ln.Run(), allowing you to omit source=run
+
+ # upload serialized version to the configured storage
+ # commit a DObject record to the SQL database
+ ln.add(dobject)
+ #> DObject(id='dZvGD7YUKCKG4X4aLd5K', name='My dataframe', suffix='.parquet', size=2240, hash='R2_kKlH1nBGesMdyulMYkA', source_id='L1oBMKW60ndt5YtjRqav', storage_id='wor0ul6c', created_at=datetime.datetime(2023, 3, 14, 21, 49, 46))
+ ```
+
+ :::
+ ::::
+
+ ### Query & load data
+
+ ```python
+ dobject = ln.select(ln.DObject, name="My dataframe").one()
+ #> [DObject(id='dZvGD7YUKCKG4X4aLd5K', name='My dataframe', suffix='.parquet', size=2240, hash='R2_kKlH1nBGesMdyulMYkA', source_id='L1oBMKW60ndt5YtjRqav', storage_id='wor0ul6c', created_at=datetime.datetime(2023, 3, 14, 21, 49, 46))]
+ df = dobject.load()
+ #> a b
+ #> 0 1 3
+ #> 1 2 4
+ ```
+
+ Get the data ingested by the latest run:
+
+ ```python
+ run = ln.select(ln.Run).order_by(ln.Run.created_at.desc()).first()
+ #> Run(id='L1oBMKW60ndt5YtjRqav', notebook_id='sePTpDsGJRq3', notebook_v='0', created_by='bKeW4T6E', created_at=datetime.datetime(2023, 3, 14, 21, 49, 36))
+ dobject = ln.select(ln.DObject).where(ln.DObject.source == run).all()
+ #> [DObject(id='dZvGD7YUKCKG4X4aLd5K', name='My dataframe', suffix='.parquet', size=2240, hash='R2_kKlH1nBGesMdyulMYkA', source_id='L1oBMKW60ndt5YtjRqav', storage_id='wor0ul6c', created_at=datetime.datetime(2023, 3, 14, 21, 49, 46))]
+ ```
+
+ <br>
+
+ See {doc}`/guide/track` for more.
+
+ ## Track biological metadata
+
+ ### Track biological features
+
+ ```python
+ import bionty as bt # Lamin's manager for biological knowledge
+ import lamindb as ln
+
+ ln.Run() # assume we're in a notebook and don't need to pass pipeline_name
+
+ # a sample single cell RNA-seq dataset
+ adata = ln.dev.datasets.anndata_mouse_sc_lymph_node()
+
+ # Create a reference
+ # - ensembl id as the standardized id
+ # - mouse as the species
+ reference = bt.Gene(species="mouse")
+
+ # parse gene identifiers from data and map on reference
+ features = ln.Features(adata, reference)
+ #> 🔶 id column not found, using index as features.
+ #> ✅ 0 terms (0.0%) are mapped.
+ #> 🔶 10000 terms (100.0%) are not mapped.
+ # The result is a hashed feature set record:
+ print(features)
+ #> Features(id='2Mv3JtH-ScBVYHilbLaQ', type='gene', created_by='bKeW4T6E')
+ # genes records can be accessed via:
+ print(features.genes[:3])
+ #> [Gene(id='ENSMUSG00000020592', species_id='NCBI_10090'),
+ #> Gene(id='ENSMUSG00000034931', species_id='NCBI_10090'),
+ #> Gene(id='ENSMUSG00000071005', species_id='NCBI_10090')]
+
+ # track data with features
+ dobject = ln.DObject(adata, name="Mouse Lymph Node scRNA-seq", features=features)
+
+ # access linked gene references
+ print(dobject.features.genes[:3])
+ #> [Gene(id='ENSMUSG00000020592', species_id='NCBI_10090'),
+ #> Gene(id='ENSMUSG00000034931', species_id='NCBI_10090'),
+ #> Gene(id='ENSMUSG00000071005', species_id='NCBI_10090')]
+
+ # upload serialized data to configured storage
+ # commit a DObject record to the SQL database
+ # commit all linked features to the SQL database
+ ln.add(dobject)
+ ```
+
+ <br>
+
+ See {doc}`/guide/features` for more.
+
+ ```{tip}
+ - Each page in this guide is a Jupyter Notebook, which you can download [here](https://github.com/laminlabs/lamindb/tree/main/docs/guide).
+ - You can run these notebooks in hosted versions of JupyterLab, e.g., [Saturn Cloud](https://github.com/laminlabs/run-lamin-on-saturn), Google Vertex AI, and others.
+ - We recommend using [JupyterLab](https://jupyterlab.readthedocs.io/) for best notebook tracking experience.
+ ```
+
+ 📬 [Reach out](https://lamin.ai/contact) to report issues, learn about data modules that connect your assays, pipelines & workflows within our data platform enterprise plan.

{lamindb-0.32.0 → lamindb-0.33.0}/docs/changelog.md +9 -2

@@ -3,10 +3,17 @@
  <!-- prettier-ignore -->
  Name | PR | Developer | Date | Version
  --- | --- | --- | --- | ---
+ 🚚 Replace `ln.nb.header()` with `ln.Run()` except in `faq/nb` | [564](https://github.com/laminlabs/lamindb/pull/564) | [falexwolf](https://github.com/falexwolf) | 2023-03-14 | 0.33.0
+ 🚸 Smart about `global_context` and `load_latest` when run from notebook | [563](https://github.com/laminlabs/lamindb/pull/563) | [falexwolf](https://github.com/falexwolf) | 2023-03-14 |
+ ✨ `ln.Features` | [562](https://github.com/laminlabs/lamindb/pull/562) | [sunnyosun](https://github.com/sunnyosun) | 2023-03-14 |
+ 🏗️ Introduce `lamindb.context` and enable `ln.Run` to create contexts | [561](https://github.com/laminlabs/lamindb/pull/561) | [falexwolf](https://github.com/falexwolf) | 2023-03-13 |
+ 📝 Improve the docstrings of `ln.add` and `ln.delete` | [559](https://github.com/laminlabs/lamindb/pull/559) | [sunnyosun](https://github.com/sunnyosun) | 2023-03-10 |
+ 📝 Hide CI related cells in notebooks | [558](https://github.com/laminlabs/lamindb/pull/558) | [sunnyosun](https://github.com/sunnyosun) | 2023-03-10 |
+ 📝 Update docs to clarify sign up and log in | [557](https://github.com/laminlabs/lamindb/pull/557) | [lawrlee](https://github.com/lawrlee) | 2023-03-10 |
  📝 Prettier species query | [555](https://github.com/laminlabs/lamindb/pull/555) | [falexwolf](https://github.com/falexwolf) | 2023-03-09 | 0.32.0
  📝 Refactor docs sidebar | [553](https://github.com/laminlabs/lamindb/pull/553) | [sunnyosun](https://github.com/sunnyosun) | 2023-03-09 |
- ⬆️ Upgrade setup | [554](https://github.com/laminlabs/lamindb/pull/554) | [falexwolf](https://github.com/falexwolf) | 2023-03-09 |
- ⬆️ Stable release of lnschema-bionty, remove knowledge | [552](https://github.com/laminlabs/lamindb/pull/552) | [sunnyosun](https://github.com/sunnyosun) | 2023-03-09 |
+ ⬆️ Upgrade `ln.setup` | [554](https://github.com/laminlabs/lamindb/pull/554) | [falexwolf](https://github.com/falexwolf) | 2023-03-09 |
+ 🔥 Remove `ln.knowledge` <span class="badge badge-warning">Breaking</span> | [552](https://github.com/laminlabs/lamindb/pull/552) | [sunnyosun](https://github.com/sunnyosun) | 2023-03-09 |
  ➖ Remove bionty as a dependency | [551](https://github.com/laminlabs/lamindb/pull/551) | [sunnyosun](https://github.com/sunnyosun) | 2023-03-09 | 0.32.0rc1
  📝 Replace `ln.knowledge` with `bionty` in docs | [547](https://github.com/laminlabs/lamindb/pull/547) | [falexwolf](https://github.com/falexwolf) | 2023-03-07 | 0.31.1
  📝 Link FAQ and guide to session and notebook API | [550](https://github.com/laminlabs/lamindb/pull/550) | [falexwolf](https://github.com/falexwolf) | 2023-03-07 |
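
The top changelog entries drive most of the notebook diffs that follow: `ln.nb.header()` is superseded by `ln.Run()`, and the active run moves onto `lamindb.context`. A minimal before/after sketch, pieced together from the replacements visible in those diffs rather than from the 0.33.0 reference docs:

```python
import lamindb as ln

# before 0.33.0 (kept for backward compatibility, see faq/nb below):
# ln.nb.header()

# 0.33.0: creating a Run inside a notebook also sets up the global run context
ln.Run()

# the current run is now exposed on the context object instead of ln.nb.run
print(ln.context.run)
```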

{lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/data-validation.ipynb +2 -2

@@ -309,7 +309,7 @@
  ],
  "metadata": {
  "kernelspec": {
- "display_name": "Python 3.9.12 ('base1')",
+ "display_name": "Python 3 (ipykernel)",
  "language": "python",
  "name": "python3"
  },
@@ -323,7 +323,7 @@
  "name": "python",
  "nbconvert_exporter": "python",
  "pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.15"
  }
  },
  "nbformat": 4,

{lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/ingest-acid.ipynb +1 -1

@@ -40,7 +40,7 @@
  "import lamindb as ln\n",
  "import lamindb.schema as lns\n",
  "\n",
- "ln.nb.header()"
+ "ln.Run()"
  ]
  },
  {

{lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/ingest-same-file-twice.ipynb +1 -1

@@ -30,7 +30,7 @@
  "import lamindb as ln\n",
  "import pytest\n",
  "\n",
- "ln.nb.header()"
+ "ln.Run()"
  ]
  },
  {

{lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/ingest.ipynb +5 -21

@@ -50,20 +50,7 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "NO_SOURCE_ERROR = \"\"\"\n",
- "Error: Please link a data source using the `source` argument.\n",
- "Fix: Link a data source by passing a run, e.g., via\n",
- "\n",
- "pipeline = ln.select(\"My ingestion pipeline\").one()\n",
- "run = lns.Run(pipeline=pipeline)\n",
- "dobject = ln.DObject(..., source=run)\n",
- "\n",
- "Or, if you're in a notebook, call `ln.nb.header()` at the top, which creates\n",
- "a global run context for the notebook.\n",
- "\n",
- "More details: https://lamin.ai/docs/faq/ingest\n",
- "\"\"\"\n",
- "with pytest.raises(ValueError, match=re.escape(NO_SOURCE_ERROR)):\n",
+ "with pytest.raises(ValueError):\n",
  " dobject = ln.DObject(df)"
  ]
  },
@@ -152,7 +139,7 @@
  "cell_type": "markdown",
  "metadata": {},
  "source": [
- "Alternatively, we can call `ln.nb.header()`, which auto-assigns the notebook run as the data source:"
+ "Alternatively, we can call `ln.Run()`, which auto-assigns the notebook run as the data source:"
  ]
  },
  {
@@ -161,7 +148,7 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "ln.nb.header()"
+ "ln.Run()"
  ]
  },
  {
@@ -195,7 +182,7 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "ln.nb.run"
+ "ln.context.run"
  ]
  },
  {
@@ -211,10 +198,7 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "NO_NAME_ERROR = \"\"\"\n",
- "Pass a name in `ln.DObject(..., name=name)` when ingesting in-memory data.\n",
- "\"\"\"\n",
- "with pytest.raises(ValueError, match=re.escape(NO_NAME_ERROR)):\n",
+ "with pytest.raises(ValueError):\n",
  " dobject = ln.DObject(df)"
  ]
  }
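
The deleted `NO_SOURCE_ERROR`/`NO_NAME_ERROR` strings spelled out what these two `ValueError`s mean; since the FAQ now only asserts the exception type, here is a sketch of the failure and its fixes, reconstructed from the removed message text (the commented pipeline query follows the wording of that message and may not match the 0.33.0 API exactly):

```python
import pandas as pd
import lamindb as ln

df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})

# Without a run context, without `source=...`, and without a `name` for
# in-memory data, DObject creation raises a ValueError:
try:
    ln.DObject(df)
except ValueError as error:
    print(error)

# Fix 1: create the notebook run context first, then pass a name
ln.Run()
dobject = ln.DObject(df, name="My dataframe")

# Fix 2 (pipelines): link a run explicitly, as in the removed error message
# import lamindb.schema as lns
# pipeline = ln.select(lns.Pipeline, name="My ingestion pipeline").one()
# run = ln.Run(pipeline=pipeline)
# dobject = ln.DObject(df, name="My dataframe", source=run)
```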

{lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/lazy-loading.ipynb +2 -2

@@ -29,7 +29,7 @@
  "import lamindb.schema as lns\n",
  "\n",
  "ln.setup.load(\"testuser1/mydata\")\n",
- "ln.nb.header()"
+ "ln.Run()"
  ]
  },
  {
@@ -68,7 +68,7 @@
  "outputs": [],
  "source": [
  "with ln.Session() as ss:\n",
- " features_session = ss.select(lns.Features, id=featureset_id).one()\n",
+ " features_session = ss.select(ln.Features, id=featureset_id).one()\n",
  " gene_subset = features_session.genes[:3]"
  ]
  },
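
The context lines above are the crux of the lazy-loading FAQ: relationship attributes such as `Features.genes` are only loadable while a `Session` is open, and with 0.33.0 the ORM is addressed as `ln.Features`. A self-contained sketch of the same pattern; `featureset_id` is a placeholder for an id that exists in your instance:

```python
import lamindb as ln

featureset_id = "2Mv3JtH-ScBVYHilbLaQ"  # placeholder, replace with an id from your instance

with ln.Session() as ss:
    features = ss.select(ln.Features, id=featureset_id).one()
    gene_subset = features.genes[:3]  # relationship resolved while the session is open

print(gene_subset)
```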

{lamindb-0.32.0 → lamindb-0.33.0}/docs/faq/nb.ipynb +31 -7

@@ -12,17 +12,18 @@
  "cell_type": "markdown",
  "metadata": {},
  "source": [
- "We've seen the default behavior in the guide (passing `run=None` to `nb.header(`)`).\n",
+ "```{warning}\n",
  "\n",
- "Here, we look at the two other ways for calling it:\n",
+ "This API is subject to change! It's remaining for some time for backward compat.\n",
  "\n",
- "- Passing `run=\"new\"`\n",
- "- Passing an instance of `lns.Run`\n",
+ "```\n",
  "\n",
- "```{seealso}\n",
+ "We've seen the default behavior in the guide (passing `run=None` to `nb.header()`).\n",
+ "\n",
+ "Here, we look at the two other ways for calling it:\n",
  "\n",
- "For all other parameters, see the doc of [nbproject](https://lamin.ai/docs/nbproject).\n",
- "```"
+ "- Passing `run=\"new\"`\n",
+ "- Passing an instance of `lns.Run`"
  ]
  },
  {
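
The rewritten markdown cell above still documents the two non-default ways of calling the (now backward-compatible) `ln.nb.header()`. A sketch of both, with the run queried via the `ln.select` pattern from the README rather than constructed by hand:

```python
import lamindb as ln

# 1) force a fresh run record instead of loading the latest one
ln.nb.header(run="new")

# 2) pass an existing run record explicitly
run = ln.select(ln.Run).order_by(ln.Run.created_at.desc()).first()
ln.nb.header(run=run)
```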
@@ -68,6 +69,29 @@
  " ln.nb.header(run=\"foo\")"
  ]
  },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Passing nothing will load the latest run:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "ln.nb.header()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "This publishes the notebook."
+ ]
+ },
  {
  "cell_type": "code",
  "execution_count": null,