geoai-py 0.6.0__tar.gz → 0.7.0__tar.gz

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (119)
  1. {geoai_py-0.6.0 → geoai_py-0.7.0}/PKG-INFO +1 -1
  2. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/data_visualization.ipynb +3 -3
  3. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/download_data.ipynb +1 -1
  4. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/edit_vector.ipynb +2 -2
  5. geoai_py-0.7.0/docs/examples/train_landcover_classification.ipynb +271 -0
  6. geoai_py-0.7.0/docs/examples/train_segmentation_model.ipynb +411 -0
  7. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai/__init__.py +1 -1
  8. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai/geoai.py +7 -1
  9. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai/train.py +1041 -9
  10. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai/utils.py +13 -0
  11. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai_py.egg-info/PKG-INFO +1 -1
  12. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai_py.egg-info/SOURCES.txt +2 -0
  13. {geoai_py-0.6.0 → geoai_py-0.7.0}/mkdocs.yml +2 -0
  14. {geoai_py-0.6.0 → geoai_py-0.7.0}/pyproject.toml +2 -2
  15. {geoai_py-0.6.0 → geoai_py-0.7.0}/.editorconfig +0 -0
  16. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/FUNDING.yml +0 -0
  17. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
  18. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/ISSUE_TEMPLATE/config.yml +0 -0
  19. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
  20. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/dependabot.yml +0 -0
  21. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/workflows/docker-image.yml +0 -0
  22. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/workflows/docker-publish.yml +0 -0
  23. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/workflows/docs-build.yml +0 -0
  24. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/workflows/docs.yml +0 -0
  25. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/workflows/macos.yml +0 -0
  26. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/workflows/pypi.yml +0 -0
  27. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/workflows/ubuntu.yml +0 -0
  28. {geoai_py-0.6.0 → geoai_py-0.7.0}/.github/workflows/windows.yml +0 -0
  29. {geoai_py-0.6.0 → geoai_py-0.7.0}/.gitignore +0 -0
  30. {geoai_py-0.6.0 → geoai_py-0.7.0}/.pre-commit-config.yaml +0 -0
  31. {geoai_py-0.6.0 → geoai_py-0.7.0}/Dockerfile +0 -0
  32. {geoai_py-0.6.0 → geoai_py-0.7.0}/LICENSE +0 -0
  33. {geoai_py-0.6.0 → geoai_py-0.7.0}/MANIFEST.in +0 -0
  34. {geoai_py-0.6.0 → geoai_py-0.7.0}/README.md +0 -0
  35. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/CNAME +0 -0
  36. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/assets/logo.ico +0 -0
  37. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/assets/logo.png +0 -0
  38. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/assets/logo_rect.png +0 -0
  39. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/changelog.md +0 -0
  40. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/classify.md +0 -0
  41. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/contributing.md +0 -0
  42. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/download.md +0 -0
  43. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/_template.ipynb +0 -0
  44. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/building_footprints_africa.ipynb +0 -0
  45. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/building_footprints_china.ipynb +0 -0
  46. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/building_footprints_usa.ipynb +0 -0
  47. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/building_regularization.ipynb +0 -0
  48. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/car_detection.ipynb +0 -0
  49. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/create_vector.ipynb +0 -0
  50. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/dataviz/lidar_viz.ipynb +0 -0
  51. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/dataviz/raster_viz.ipynb +0 -0
  52. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/dataviz/vector_viz.ipynb +0 -0
  53. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/download_naip.ipynb +0 -0
  54. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/download_sentinel2.ipynb +0 -0
  55. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/geometric_properties.ipynb +0 -0
  56. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/globe_projection.ipynb +0 -0
  57. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/image_chips.ipynb +0 -0
  58. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/jupytext.toml +0 -0
  59. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/parking_spot_detection.ipynb +0 -0
  60. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/planetary_computer.ipynb +0 -0
  61. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/rastervision/semantic_segmentation.ipynb +0 -0
  62. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/regularization.ipynb +0 -0
  63. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/arcgis.ipynb +0 -0
  64. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/automatic_mask_generator.ipynb +0 -0
  65. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/automatic_mask_generator_hq.ipynb +0 -0
  66. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/box_prompts.ipynb +0 -0
  67. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/fast_sam.ipynb +0 -0
  68. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/input_prompts.ipynb +0 -0
  69. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/input_prompts_hq.ipynb +0 -0
  70. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/maxar_open_data.ipynb +0 -0
  71. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/satellite-predictor.ipynb +0 -0
  72. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/satellite.ipynb +0 -0
  73. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/swimming_pools.ipynb +0 -0
  74. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/text_prompts.ipynb +0 -0
  75. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo/text_prompts_batch.ipynb +0 -0
  76. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/samgeo.ipynb +0 -0
  77. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/ship_detection.ipynb +0 -0
  78. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/solar_panel_detection.ipynb +0 -0
  79. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/text_prompt_segmentation.ipynb +0 -0
  80. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/train_building_footprints_usa.ipynb +0 -0
  81. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/train_car_detection.ipynb +0 -0
  82. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/train_object_detection_model.ipynb +0 -0
  83. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/train_ship_detection.ipynb +0 -0
  84. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/train_solar_panel_detection.ipynb +0 -0
  85. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/train_water_detection.ipynb +0 -0
  86. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/view_metadata.ipynb +0 -0
  87. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/water_dynamics.ipynb +0 -0
  88. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/wetland_mapping.ipynb +0 -0
  89. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/extract.md +0 -0
  90. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/faq.md +0 -0
  91. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/geoai.md +0 -0
  92. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/hf.md +0 -0
  93. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/index.md +0 -0
  94. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/installation.md +0 -0
  95. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/overrides/main.html +0 -0
  96. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/sam.md +0 -0
  97. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/segment.md +0 -0
  98. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/segmentation.md +0 -0
  99. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/train.md +0 -0
  100. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/usage.md +0 -0
  101. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/utils.md +0 -0
  102. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/workshops/GeoAI_Workshop_2025.ipynb +0 -0
  103. {geoai_py-0.6.0 → geoai_py-0.7.0}/docs/workshops/jupytext.toml +0 -0
  104. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai/classify.py +0 -0
  105. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai/download.py +0 -0
  106. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai/extract.py +0 -0
  107. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai/hf.py +0 -0
  108. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai/sam.py +0 -0
  109. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai/segment.py +0 -0
  110. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai/segmentation.py +0 -0
  111. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai_py.egg-info/dependency_links.txt +0 -0
  112. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai_py.egg-info/entry_points.txt +0 -0
  113. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai_py.egg-info/requires.txt +0 -0
  114. {geoai_py-0.6.0 → geoai_py-0.7.0}/geoai_py.egg-info/top_level.txt +0 -0
  115. {geoai_py-0.6.0 → geoai_py-0.7.0}/requirements.txt +0 -0
  116. {geoai_py-0.6.0 → geoai_py-0.7.0}/requirements_docs.txt +0 -0
  117. {geoai_py-0.6.0 → geoai_py-0.7.0}/setup.cfg +0 -0
  118. {geoai_py-0.6.0 → geoai_py-0.7.0}/tests/__init__.py +0 -0
  119. {geoai_py-0.6.0 → geoai_py-0.7.0}/tests/test_geoai.py +0 -0

{geoai_py-0.6.0 → geoai_py-0.7.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: geoai-py
- Version: 0.6.0
+ Version: 0.7.0
  Summary: A Python package for using Artificial Intelligence (AI) with geospatial data
  Author-email: Qiusheng Wu <giswqs@gmail.com>
  License: MIT License
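
The only change here is the version bump. To pick up the new training functionality described later in this diff, upgrade the package; shown below as an IPython magic, mirroring the (commented-out) `%pip install geoai-py` cell in the new notebooks.

```python
# Upgrade to the release this diff describes (run inside a Jupyter/IPython session).
%pip install -U geoai-py==0.7.0
```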

{geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/data_visualization.ipynb
@@ -334,7 +334,7 @@
  "cell_type": "markdown",
  "metadata": {},
  "source": [
- "Visualize the raster data on an interactive map with Google Satellite imagery as the background:"
+ "Visualize the raster data on an interactive map with Esri.WorldImagery imagery as the background:"
  ]
  },
  {
@@ -343,14 +343,14 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "view_raster(data, basemap=\"Google Satellite\")"
+ "view_raster(data, basemap=\"Esri.WorldImagery\")"
  ]
  },
  {
  "cell_type": "markdown",
  "metadata": {},
  "source": [
- "This interactive visualization places the sampled data in its real-world geographic context, allowing you to see how it aligns with the Google Satellite imagery.\n",
+ "This interactive visualization places the sampled data in its real-world geographic context, allowing you to see how it aligns with the Esri.WorldImagery imagery.\n",
  "\n",
  "## Key Takeaways\n",
  "\n",

{geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/download_data.ipynb
@@ -58,7 +58,7 @@
  "outputs": [],
  "source": [
  "m = leafmap.Map(center=[47.6526, -117.5923], zoom=16)\n",
- "m.add_basemap(\"Google Satellite\")\n",
+ "m.add_basemap(\"Esri.WorldImagery\")\n",
  "m"
  ]
  },
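
The hunks above switch the example notebooks from the "Google Satellite" basemap to "Esri.WorldImagery". A minimal sketch of the updated calls, assuming a hypothetical local GeoTIFF path in place of the `data` variable that data_visualization.ipynb defines in cells not shown in this diff:

```python
import geoai
import leafmap

# Hypothetical path standing in for the `data` variable from data_visualization.ipynb.
raster_path = "sampled_raster.tif"

# 0.7.0 examples use the Esri.WorldImagery basemap in place of "Google Satellite".
geoai.view_raster(raster_path, basemap="Esri.WorldImagery")

# download_data.ipynb applies the same swap to its leafmap map
# (center coordinates copied from the cell in the hunk above).
m = leafmap.Map(center=[47.6526, -117.5923], zoom=16)
m.add_basemap("Esri.WorldImagery")
m  # displays the map when run in a notebook
```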

{geoai_py-0.6.0 → geoai_py-0.7.0}/docs/examples/edit_vector.ipynb
@@ -51,7 +51,7 @@
  "outputs": [],
  "source": [
  "m = geoai.MapLibre()\n",
- "m.add_basemap(\"Google Satellite\")\n",
+ "m.add_basemap(\"Esri.WorldImagery\")\n",
  "url = \"https://huggingface.co/datasets/giswqs/geospatial/resolve/main/naip_train_buildings.geojson\""
  ]
  },
@@ -102,7 +102,7 @@
  ],
  "metadata": {
  "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
+ "display_name": "geo",
  "language": "python",
  "name": "python3"
  },
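
edit_vector.ipynb gets the same basemap swap for its MapLibre map (the second hunk only changes the saved kernel display name and has no runtime effect). The updated setup cell as a standalone snippet:

```python
import geoai

# MapLibre-based map with the Esri.WorldImagery basemap; the GeoJSON URL points to
# the building-footprint training data referenced by the notebook.
m = geoai.MapLibre()
m.add_basemap("Esri.WorldImagery")
url = "https://huggingface.co/datasets/giswqs/geospatial/resolve/main/naip_train_buildings.geojson"
```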

geoai_py-0.7.0/docs/examples/train_landcover_classification.ipynb (new file)
@@ -0,0 +1,271 @@
+ {
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "vscode": {
+ "languageId": "raw"
+ }
+ },
+ "source": [
+ "# Train a Land Cover Classification Model\n",
+ "\n",
+ "[![image](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/opengeos/geoai/blob/main/docs/examples/train_landcover_classification.ipynb)\n",
+ "\n",
+ "## Install packages\n",
+ "\n",
+ "To use the new functionality, ensure the required packages are installed."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# %pip install geoai-py"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "vscode": {
+ "languageId": "raw"
+ }
+ },
+ "source": [
+ "## Import libraries"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import geoai"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Download sample data"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "train_raster_url = \"https://huggingface.co/datasets/giswqs/geospatial/resolve/main/m_3807511_ne_18_060_20181104.tif\"\n",
+ "train_landcover_url = \"https://huggingface.co/datasets/giswqs/geospatial/resolve/main/m_3807511_ne_18_060_20181104_landcover.tif\"\n",
+ "test_raster_url = \"https://huggingface.co/datasets/giswqs/geospatial/resolve/main/m_3807511_se_18_060_20181104.tif\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "train_raster_path = geoai.download_file(train_raster_url)\n",
+ "train_landcover_path = geoai.download_file(train_landcover_url)\n",
+ "test_raster_path = geoai.download_file(test_raster_url)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Visualize sample data"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "geoai.view_raster(train_landcover_url, basemap=train_raster_url)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "geoai.view_raster(test_raster_url)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "vscode": {
+ "languageId": "raw"
+ }
+ },
+ "source": [
+ "## Create training data\n",
+ "\n",
+ "We will use the NAIP dataset for land cover classification. The classification scheme is adopted from the [Chesapeake Land Cover](https://planetarycomputer.microsoft.com/dataset/chesapeake-lc-13) project.\n",
+ "\n",
+ "**Important Note for Land Cover Classification:**\n",
+ "- Your label images should contain integer class values (0, 1, 2, ..., 12 for 13 classes)\n",
+ "- Do NOT use binary masks - the training code now properly handles multi-class labels\n",
+ "- Class 0 is typically background, classes 1-12 are your land cover types"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "out_folder = \"landcover\"\n",
+ "tiles = geoai.export_geotiff_tiles(\n",
+ " in_raster=train_raster_path,\n",
+ " out_folder=out_folder,\n",
+ " in_class_data=train_landcover_path,\n",
+ " tile_size=512,\n",
+ " stride=256,\n",
+ " buffer_radius=0,\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "vscode": {
+ "languageId": "raw"
+ }
+ },
+ "source": [
+ "## Train semantic segmentation model\n",
+ "\n",
+ "Now we'll train a semantic segmentation model using the new `train_segmentation_model` function. This function supports various architectures from `segmentation-models-pytorch`:\n",
+ "\n",
+ "- **Architectures**: `unet`, `deeplabv3`, `deeplabv3plus`, `fpn`, `pspnet`, `linknet`, `manet`\n",
+ "- **Encoders**: `resnet34`, `resnet50`, `efficientnet-b0`, `mobilenet_v2`, etc."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Train U-Net model\n",
+ "geoai.train_segmentation_model(\n",
+ " images_dir=f\"{out_folder}/images\",\n",
+ " labels_dir=f\"{out_folder}/labels\",\n",
+ " output_dir=f\"{out_folder}/unet_models\",\n",
+ " architecture=\"unet\",\n",
+ " encoder_name=\"resnet34\",\n",
+ " encoder_weights=\"imagenet\",\n",
+ " num_channels=4,\n",
+ " num_classes=13,\n",
+ " batch_size=8,\n",
+ " num_epochs=50,\n",
+ " learning_rate=0.001,\n",
+ " val_split=0.2,\n",
+ " verbose=True,\n",
+ " plot_curves=True,\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "vscode": {
+ "languageId": "raw"
+ }
+ },
+ "source": [
+ "## Run inference\n",
+ "\n",
+ "Now we'll use the trained model to make predictions on the test image."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Define paths\n",
+ "masks_path = \"naip_test_semantic_prediction.tif\"\n",
+ "model_path = f\"{out_folder}/unet_models/best_model.pth\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Run semantic segmentation inference\n",
+ "geoai.semantic_segmentation(\n",
+ " input_path=test_raster_path,\n",
+ " output_path=masks_path,\n",
+ " model_path=model_path,\n",
+ " architecture=\"unet\",\n",
+ " encoder_name=\"resnet34\",\n",
+ " num_channels=4,\n",
+ " num_classes=13,\n",
+ " window_size=512,\n",
+ " overlap=256,\n",
+ " batch_size=4,\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Visualize results"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "geoai.write_colormap(masks_path, train_landcover_path, output=masks_path)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "geoai.view_raster(masks_path, basemap=test_raster_url)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "geo",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.2"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+ }
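
For readers who would rather not parse the notebook JSON above, here is the same workflow condensed into a plain Python script. The URLs, paths, and hyperparameters are copied from the cells in this hunk; treat it as a sketch of the 0.7.0 training and inference API (`train_segmentation_model`, `semantic_segmentation`) rather than standalone documentation.

```python
import geoai

# Sample NAIP imagery and Chesapeake-style land cover labels
# (13 classes, with class 0 typically background).
train_raster_url = "https://huggingface.co/datasets/giswqs/geospatial/resolve/main/m_3807511_ne_18_060_20181104.tif"
train_landcover_url = "https://huggingface.co/datasets/giswqs/geospatial/resolve/main/m_3807511_ne_18_060_20181104_landcover.tif"
test_raster_url = "https://huggingface.co/datasets/giswqs/geospatial/resolve/main/m_3807511_se_18_060_20181104.tif"

train_raster_path = geoai.download_file(train_raster_url)
train_landcover_path = geoai.download_file(train_landcover_url)
test_raster_path = geoai.download_file(test_raster_url)

# Tile the training image and its multi-class label raster into 512x512 chips.
out_folder = "landcover"
geoai.export_geotiff_tiles(
    in_raster=train_raster_path,
    out_folder=out_folder,
    in_class_data=train_landcover_path,
    tile_size=512,
    stride=256,
    buffer_radius=0,
)

# Train a U-Net (resnet34 encoder) on the 4-band NAIP chips.
geoai.train_segmentation_model(
    images_dir=f"{out_folder}/images",
    labels_dir=f"{out_folder}/labels",
    output_dir=f"{out_folder}/unet_models",
    architecture="unet",
    encoder_name="resnet34",
    encoder_weights="imagenet",
    num_channels=4,
    num_classes=13,
    batch_size=8,
    num_epochs=50,
    learning_rate=0.001,
    val_split=0.2,
    verbose=True,
    plot_curves=True,
)

# Sliding-window inference on the held-out tile, then reuse the label colormap for display.
masks_path = "naip_test_semantic_prediction.tif"
geoai.semantic_segmentation(
    input_path=test_raster_path,
    output_path=masks_path,
    model_path=f"{out_folder}/unet_models/best_model.pth",
    architecture="unet",
    encoder_name="resnet34",
    num_channels=4,
    num_classes=13,
    window_size=512,
    overlap=256,
    batch_size=4,
)
geoai.write_colormap(masks_path, train_landcover_path, output=masks_path)
geoai.view_raster(masks_path, basemap=test_raster_url)
```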