rslearn 0.0.11__py3-none-any.whl → 0.0.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34) hide show
  1. rslearn/config/dataset.py +23 -4
  2. rslearn/data_sources/planetary_computer.py +52 -0
  3. rslearn/dataset/handler_summaries.py +1 -0
  4. rslearn/dataset/manage.py +16 -2
  5. rslearn/models/anysat.py +5 -1
  6. rslearn/models/dinov3.py +6 -1
  7. rslearn/models/feature_center_crop.py +50 -0
  8. rslearn/models/olmoearth_pretrain/model.py +88 -27
  9. rslearn/models/prithvi.py +9 -1
  10. rslearn/train/lightning_module.py +0 -3
  11. rslearn/train/prediction_writer.py +25 -8
  12. rslearn/train/tasks/classification.py +2 -2
  13. rslearn/train/tasks/detection.py +5 -5
  14. rslearn/train/tasks/embedding.py +116 -0
  15. rslearn/train/tasks/per_pixel_regression.py +5 -4
  16. rslearn/train/tasks/regression.py +5 -5
  17. rslearn/train/transforms/pad.py +3 -3
  18. rslearn/utils/raster_format.py +38 -0
  19. {rslearn-0.0.11.dist-info → rslearn-0.0.13.dist-info}/METADATA +3 -2
  20. {rslearn-0.0.11.dist-info → rslearn-0.0.13.dist-info}/RECORD +25 -31
  21. rslearn-0.0.13.dist-info/licenses/NOTICE +115 -0
  22. rslearn/models/copernicusfm.py +0 -228
  23. rslearn/models/copernicusfm_src/__init__.py +0 -1
  24. rslearn/models/copernicusfm_src/aurora/area.py +0 -50
  25. rslearn/models/copernicusfm_src/aurora/fourier.py +0 -134
  26. rslearn/models/copernicusfm_src/dynamic_hypernetwork.py +0 -523
  27. rslearn/models/copernicusfm_src/flexivit/patch_embed.py +0 -260
  28. rslearn/models/copernicusfm_src/flexivit/utils.py +0 -69
  29. rslearn/models/copernicusfm_src/model_vit.py +0 -348
  30. rslearn/models/copernicusfm_src/util/pos_embed.py +0 -216
  31. {rslearn-0.0.11.dist-info → rslearn-0.0.13.dist-info}/WHEEL +0 -0
  32. {rslearn-0.0.11.dist-info → rslearn-0.0.13.dist-info}/entry_points.txt +0 -0
  33. {rslearn-0.0.11.dist-info → rslearn-0.0.13.dist-info}/licenses/LICENSE +0 -0
  34. {rslearn-0.0.11.dist-info → rslearn-0.0.13.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,116 @@
1
+ """Embedding task."""
2
+
3
+ from typing import Any
4
+
5
+ import numpy.typing as npt
6
+ import torch
7
+ from torchmetrics import MetricCollection
8
+
9
+ from rslearn.utils import Feature
10
+
11
+ from .task import Task
12
+
13
+
14
class EmbeddingTask(Task):
    """A dummy task for computing embeddings.

    This task does not compute any targets or loss. Instead, it is just set up for
    inference, to save embeddings from the configured model.
    """

    def process_inputs(
        self,
        raw_inputs: dict[str, torch.Tensor],
        metadata: dict[str, Any],
        load_targets: bool = True,
    ) -> tuple[dict[str, Any], dict[str, Any]]:
        """Process the raw data into an (input, target) pair.

        Args:
            raw_inputs: raster or vector data to process
            metadata: metadata about the patch being read
            load_targets: whether to load the targets or only inputs

        Returns:
            tuple (input_dict, target_dict). Both are empty: embeddings need no
            task-specific inputs or targets.
        """
        # Embeddings come straight from the model, so there is nothing to prepare.
        return ({}, {})

    def process_output(
        self, raw_output: Any, metadata: dict[str, Any]
    ) -> npt.NDArray[Any] | list[Feature]:
        """Convert a model output into raster data.

        Args:
            raw_output: the output from prediction head.
            metadata: metadata about the patch being read

        Returns:
            the embedding as a numpy array suitable for saving to GeoTIFF.
        """
        # Move to host memory and hand back a plain numpy array.
        return raw_output.cpu().numpy()

    def visualize(
        self,
        input_dict: dict[str, Any],
        target_dict: dict[str, Any] | None,
        output: Any,
    ) -> dict[str, npt.NDArray[Any]]:
        """Visualize the outputs and targets.

        Args:
            input_dict: the input dict from process_inputs
            target_dict: the target dict from process_inputs
            output: the prediction

        Returns:
            a dictionary mapping image name to visualization image

        Raises:
            NotImplementedError: always; EmbeddingTask only supports `model predict`.
        """
        raise NotImplementedError

    def get_metrics(self) -> MetricCollection:
        """Get the metrics for this task (an empty collection; nothing to score)."""
        return MetricCollection({})
77
+
78
+
79
+ class EmbeddingHead(torch.nn.Module):
80
+ """Head for embedding task.
81
+
82
+ This picks one feature map from the input list of feature maps to output. It also
83
+ returns a dummy loss.
84
+ """
85
+
86
+ def __init__(self, feature_map_index: int | None = 0):
87
+ """Create a new EmbeddingHead.
88
+
89
+ Args:
90
+ feature_map_index: the index of the feature map to choose from the input
91
+ list of multi-scale feature maps (default 0). If the input is already
92
+ a single feature map, then set to None.
93
+ """
94
+ super().__init__()
95
+ self.feature_map_index = feature_map_index
96
+
97
+ def forward(
98
+ self,
99
+ features: torch.Tensor,
100
+ inputs: list[dict[str, Any]],
101
+ targets: list[dict[str, Any]] | None = None,
102
+ ) -> tuple[torch.Tensor, dict[str, Any]]:
103
+ """Select the desired feature map and return it along with a dummy loss.
104
+
105
+ Args:
106
+ features: list of BCHW feature maps (or one feature map, if feature_map_index is None).
107
+ inputs: original inputs (ignored).
108
+ targets: should contain classes key that stores the per-pixel class labels.
109
+
110
+ Returns:
111
+ tuple of outputs and loss dict
112
+ """
113
+ if self.feature_map_index is not None:
114
+ features = features[self.feature_map_index]
115
+
116
+ return features, {"loss": 0}
@@ -26,10 +26,11 @@ class PerPixelRegressionTask(BasicTask):
26
26
  """Initialize a new PerPixelRegressionTask.
27
27
 
28
28
  Args:
29
- scale_factor: multiply the label value by this factor before using it for
29
+ scale_factor: multiply ground truth values by this factor before using them for
30
30
  training.
31
- metric_mode: what metric to use, either mse or l1
32
- nodata_value: optional value to treat as invalid
31
+ metric_mode: what metric to use, either "mse" (default) or "l1"
32
+ nodata_value: optional value to treat as invalid. The loss will be masked
33
+ at pixels where the ground truth value is equal to nodata_value.
33
34
  kwargs: other arguments to pass to BasicTask
34
35
  """
35
36
  super().__init__(**kwargs)
@@ -141,7 +142,7 @@ class PerPixelRegressionHead(torch.nn.Module):
141
142
  """Initialize a new RegressionHead.
142
143
 
143
144
  Args:
144
- loss_mode: the loss function to use, either "mse" or "l1".
145
+ loss_mode: the loss function to use, either "mse" (default) or "l1".
145
146
  use_sigmoid: whether to apply a sigmoid activation on the output. This
146
147
  requires targets to be between 0-1.
147
148
  """
@@ -33,14 +33,14 @@ class RegressionTask(BasicTask):
33
33
  """Initialize a new RegressionTask.
34
34
 
35
35
  Args:
36
- property_name: the property from which to extract the regression value. The
37
- value is read from the first matching feature.
36
+ property_name: the property from which to extract the ground truth
37
+ regression value. The value is read from the first matching feature.
38
38
  filters: optional list of (property_name, property_value) to only consider
39
39
  features with matching properties.
40
40
  allow_invalid: instead of throwing error when no regression label is found
41
41
  at a window, simply mark the example invalid for this task
42
- scale_factor: multiply the label value by this factor
43
- metric_mode: what metric to use, either mse or l1
42
+ scale_factor: multiply the label value by this factor for training
43
+ metric_mode: what metric to use, either "mse" (default) or "l1"
44
44
  use_accuracy_metric: include metric that reports percentage of
45
45
  examples where output is within a factor of the ground truth.
46
46
  within_factor: the factor for accuracy metric. If it's 0.2, and ground
@@ -189,7 +189,7 @@ class RegressionHead(torch.nn.Module):
189
189
  """Initialize a new RegressionHead.
190
190
 
191
191
  Args:
192
- loss_mode: the loss function to use, either "mse" or "l1".
192
+ loss_mode: the loss function to use, either "mse" (default) or "l1".
193
193
  use_sigmoid: whether to apply a sigmoid activation on the output. This
194
194
  requires targets to be between 0-1.
195
195
  """
@@ -25,8 +25,8 @@ class Pad(Transform):
25
25
  Args:
26
26
  size: the size to pad to, or a min/max range of pad sizes. If the image is
27
27
  larger than this size, then it is cropped instead.
28
- mode: "center" (default) to apply padding equally on all sides, or
29
- "topleft" to only apply it on the bottom and right.
28
+ mode: "topleft" (default) to only apply padding on the bottom and right
29
+ sides, or "center" to apply padding equally on all sides.
30
30
  image_selectors: image items to transform.
31
31
  box_selectors: boxes items to transform.
32
32
  """
@@ -64,7 +64,7 @@ class Pad(Transform):
64
64
  ) -> torch.Tensor:
65
65
  # Before/after must either be both non-negative or both negative.
66
66
  # >=0 indicates padding while <0 indicates cropping.
67
- assert (before < 0 and after < 0) or (before >= 0 and after >= 0)
67
+ assert (before < 0 and after <= 0) or (before >= 0 and after >= 0)
68
68
  if before > 0:
69
69
  # Padding.
70
70
  if horizontal:
@@ -123,6 +123,44 @@ def get_transform_from_projection_and_bounds(
123
123
  )
124
124
 
125
125
 
126
+ def adjust_projection_and_bounds_for_array(
127
+ projection: Projection, bounds: PixelBounds, array: npt.NDArray
128
+ ) -> tuple[Projection, PixelBounds]:
129
+ """Adjust the projection and bounds to correspond to the resolution of the array.
130
+
131
+ The returned projection and bounds cover the same spatial extent as the inputs, but
132
+ are updated so that the width and height match that of the array.
133
+
134
+ Args:
135
+ projection: the original projection.
136
+ bounds: the original bounds.
137
+ array: the CHW array for which to compute an updated projection and bounds. The
138
+ returned bounds will have the same width and height as this array.
139
+
140
+ Returns:
141
+ a tuple of adjusted (projection, bounds)
142
+ """
143
+ if array.shape[2] == (bounds[2] - bounds[0]) and array.shape[1] == (
144
+ bounds[3] - bounds[1]
145
+ ):
146
+ return (projection, bounds)
147
+
148
+ x_factor = array.shape[2] / (bounds[2] - bounds[0])
149
+ y_factor = array.shape[1] / (bounds[3] - bounds[1])
150
+ adjusted_projection = Projection(
151
+ projection.crs,
152
+ projection.x_resolution / x_factor,
153
+ projection.y_resolution / y_factor,
154
+ )
155
+ adjusted_bounds = (
156
+ round(bounds[0] * x_factor),
157
+ round(bounds[1] * y_factor),
158
+ round(bounds[0] * x_factor) + array.shape[2],
159
+ round(bounds[1] * y_factor) + array.shape[1],
160
+ )
161
+ return (adjusted_projection, adjusted_bounds)
162
+
163
+
126
164
  class RasterFormat:
127
165
  """An abstract class for writing raster data.
128
166
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: rslearn
3
- Version: 0.0.11
3
+ Version: 0.0.13
4
4
  Summary: A library for developing remote sensing datasets and models
5
5
  Author: OlmoEarth Team
6
6
  License: Apache License
@@ -211,9 +211,10 @@ Project-URL: repository, https://github.com/allenai/rslearn
211
211
  Requires-Python: >=3.11
212
212
  Description-Content-Type: text/markdown
213
213
  License-File: LICENSE
214
+ License-File: NOTICE
214
215
  Requires-Dist: boto3>=1.39
215
216
  Requires-Dist: fiona>=1.10
216
- Requires-Dist: fsspec>=2025.9.0
217
+ Requires-Dist: fsspec>=2025.10.0
217
218
  Requires-Dist: jsonargparse>=4.35.0
218
219
  Requires-Dist: lightning>=2.5.1.post0
219
220
  Requires-Dist: Pillow>=11.3
@@ -6,7 +6,7 @@ rslearn/main.py,sha256=fLYmm2ZsUTCaJBKZvxu3pc4fB2thaf-p2Qv0AifDlXM,31292
6
6
  rslearn/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
7
  rslearn/template_params.py,sha256=Vop0Ha-S44ctCa9lvSZRjrMETznJZlR5y_gJrVIwrPg,791
8
8
  rslearn/config/__init__.py,sha256=Bhf2VVncdMYRC8Wfb4GsJJ13OAJYNCO_ODLSNTmBOHM,638
9
- rslearn/config/dataset.py,sha256=cR6Jd9ppzHgKHCteUsNapCcsJk4k5X90E71EHfbW7m0,21046
9
+ rslearn/config/dataset.py,sha256=VpXUGKCr45kzE-W27rgF4tPQuyICfwQkJTxb2z9aXQM,21685
10
10
  rslearn/data_sources/__init__.py,sha256=8_7Pi3agKsatNoxXw74-U5G-QAP-rbdfcH8EkZfJbH4,1449
11
11
  rslearn/data_sources/aws_landsat.py,sha256=GA9H04KagBDm-N37jFdh_aHCX2ZneVdnqT1SNOyAwTs,20829
12
12
  rslearn/data_sources/aws_open_data.py,sha256=nU_D5cqc-wibxq4uyUNb0z-XD0Puf1gZ8v5FMiMAN5w,30258
@@ -24,7 +24,7 @@ rslearn/data_sources/local_files.py,sha256=d08m6IzrUN_80VfvgpHahMJrv-n6_CI6EIocp
24
24
  rslearn/data_sources/openstreetmap.py,sha256=qUSMFiIA_laJkO3meBXf9TmSI7OBD-o3i4JxqllUv3Q,19232
25
25
  rslearn/data_sources/planet.py,sha256=F2JoLaQ5Cb3k1cTm0hwSWTL2TPfbaAUMXZ8q4Dy7UlA,10109
26
26
  rslearn/data_sources/planet_basemap.py,sha256=wuWM9dHSJMdINfyWb78Zk9i-KvJHTrf9J0Q2gyEyiiA,10450
27
- rslearn/data_sources/planetary_computer.py,sha256=uHNYxvnMkmo8zbqIiDRdnkz8LQ7TSs6K39Y1AXjboDI,30392
27
+ rslearn/data_sources/planetary_computer.py,sha256=Vi-aBHQe-BA8NjRyPMgurMAdo3sK6PJteCK5MwXygJo,31869
28
28
  rslearn/data_sources/raster_source.py,sha256=b8wo55GhVLxXwx1WYLzeRAlzD_ZkE_P9tnvUOdnsfQE,689
29
29
  rslearn/data_sources/usda_cdl.py,sha256=2_V11AhPRgLEGd4U5Pmx3UvE2HWBPbsFXhUIQVRVFeE,7138
30
30
  rslearn/data_sources/usgs_landsat.py,sha256=31GmOUfmxwTE6MTiVI4psb-ciVmunuA8cfvqDuvTHPE,19312
@@ -37,20 +37,20 @@ rslearn/data_sources/xyz_tiles.py,sha256=SJV8TB6WUP6DTPr2d3LXRKVjFxda7bdR9IM84Vv
37
37
  rslearn/dataset/__init__.py,sha256=bHtBlEEBCekO-gaJqiww0-VjvZTE5ahx0llleo8bfP8,289
38
38
  rslearn/dataset/add_windows.py,sha256=pwCEvwLE1jQCoqQxw6CJ-sP46ayWppFa2hGYIB6VVkc,8494
39
39
  rslearn/dataset/dataset.py,sha256=bjf9nI55j-MF0bIQWSNPjNbpfqnLK4jy-96TAcwO0MM,5214
40
- rslearn/dataset/handler_summaries.py,sha256=wGnbBpjLWTxVn3UT7j7nPoHlYsaWb9_MVJ5DhU0qWXY,2581
40
+ rslearn/dataset/handler_summaries.py,sha256=wI99RDk5erCWkzl1A7Uc4chatQ9KWIr4F_0Hxr9Co6s,2607
41
41
  rslearn/dataset/index.py,sha256=Wni5m6h4gisRB54fPLnCfUrRTEsJ5EvwS0fs9sYc2wg,6025
42
- rslearn/dataset/manage.py,sha256=mkdBHo1RFGxMx8f9zBT_VmRO_6y8Qb2KfWPPziKWYkg,18062
42
+ rslearn/dataset/manage.py,sha256=owelBiBqvoIQYLhFMDK4ULzcoGBNE27JV8kl68jf3wg,18563
43
43
  rslearn/dataset/materialize.py,sha256=-z47svc_JqGhzkp8kq5Hd9fykWNqFEUCQezo887TWBw,22056
44
44
  rslearn/dataset/remap.py,sha256=6MaImsY02GNACpvRM81RvWmjZWRfAHxo_R3Ox6XLF6A,2723
45
45
  rslearn/dataset/window.py,sha256=I5RqZ12jlIXhohw4qews1x_I4tSDpml709DZRtLiN24,12546
46
46
  rslearn/models/__init__.py,sha256=_vWoF9d2Slah8-6XhYhdU4SRsy_CNxXjCGQTD2yvu3Q,22
47
- rslearn/models/anysat.py,sha256=3BnaiS1sYB4SnV6qRjHksiz_r9vUuZeGPUO2XUziFA0,7810
47
+ rslearn/models/anysat.py,sha256=3Oh2gWxicVdUzOjevBEZf0PuolmCy0KC5Ad7JY-0Plc,7949
48
48
  rslearn/models/clip.py,sha256=u5aqYnVB4Jag7o1h8EzPDAc1t2BAHeALA9FcUwP5tfo,2238
49
49
  rslearn/models/conv.py,sha256=fWyByeswIOKKzyPmP3erYUlZaKEV0huWHA4CyKTBbfY,1703
50
- rslearn/models/copernicusfm.py,sha256=3AiORuUre9sZYwydbrDgShwKtxeTLmExp7WQmJtBylg,7842
51
50
  rslearn/models/croma.py,sha256=cOazTp3l2PNJltKrmPqD5Gy4pi3CI03-X9G4T10cX2k,9529
52
- rslearn/models/dinov3.py,sha256=GKk5qXZPCEporATJdjaSWsDTfWDlAGRWBplFUJN5nRM,6146
51
+ rslearn/models/dinov3.py,sha256=9k9kNlXCorQQwKjLGptooANd48TUBsITQ1e4fUomlM4,6337
53
52
  rslearn/models/faster_rcnn.py,sha256=uaxX6-E1f0BibaA9sorEg3be83C7kTdTc39pC5jRqwE,8286
53
+ rslearn/models/feature_center_crop.py,sha256=24eOrvLEGGVWPw7kPHyUes5HtYNAX7GZ_NpqDGMILEY,1553
54
54
  rslearn/models/fpn.py,sha256=s3cz29I14FaSuvBvLOcwCrqVsaRBxG5GjLlqap4WgPc,1603
55
55
  rslearn/models/module_wrapper.py,sha256=H2zb-8Au4t31kawW_4JEKHsaXFjpYDawb31ZEauKcxU,2728
56
56
  rslearn/models/molmo.py,sha256=mVrARBhZciMzOgOOjGB5AHlPIf2iO9IBSJmdyKSl1L8,2061
@@ -58,7 +58,7 @@ rslearn/models/multitask.py,sha256=j2Kiwj_dUiUp_CIUr25bS8HiyeoFlr1PGqjTfpgIGLc,1
58
58
  rslearn/models/panopticon.py,sha256=woNEs53wVc5D-NxbSDEPRZ_mYe8vllnuldmADjvhfDQ,5806
59
59
  rslearn/models/pick_features.py,sha256=y8e4tJFhyG7ZuVSElWhQ5-Aer4ZKJCEH9wLGJU7WqGI,1551
60
60
  rslearn/models/pooling_decoder.py,sha256=unr2fSE_QmJHPi3dKtopqMtb1Kn-2h94LgwwAVP9vZg,4437
61
- rslearn/models/prithvi.py,sha256=SVM3ypJlVTkXQ69pPhB4UeJr87VnmADTCuyV365dbkU,39961
61
+ rslearn/models/prithvi.py,sha256=AIzcO5xk1ggR0MjbfhIzqPVgUKFN7odxygmgyAelfW8,40143
62
62
  rslearn/models/registry.py,sha256=yCcrOvLkbn07Xtln1j7hAB_kmGw0MGsiR2TloJq9Bmk,504
63
63
  rslearn/models/resize_features.py,sha256=asKXWrLHIBrU6GaAV0Ory9YuK7IK104XjhkB4ljzI3A,1289
64
64
  rslearn/models/sam2_enc.py,sha256=gNlPokr7eNxO2KvnzDMXNxYM2WRO0YkQPjR4110n6cw,3508
@@ -75,14 +75,6 @@ rslearn/models/upsample.py,sha256=3kWbyWZIk56JJxj8en9pieitbrk3XnbIsTKlEkiDQQY,93
75
75
  rslearn/models/use_croma.py,sha256=OSBqMuLp-pDtqPNWAVBfmX4wckmyYCKtUDdGCjJk_K8,17966
76
76
  rslearn/models/clay/clay.py,sha256=5RO5H8EM0tKjCwWMQ4xDkKkUCwKpm2K_Yw1alnhvVhU,7773
77
77
  rslearn/models/clay/configs/metadata.yaml,sha256=rZTFh4Yb9htEfbQNOPl4HTbFogEhzwIRqFzG-1uT01Y,4652
78
- rslearn/models/copernicusfm_src/__init__.py,sha256=8QLhisbHub6VJl6egijnrOPKK5QNAe5FJhfcxEelj4Y,22
79
- rslearn/models/copernicusfm_src/dynamic_hypernetwork.py,sha256=aWH5_PgmS8umIwRbGA42RuEx-stb13z1nBjyUhBtaN4,18218
80
- rslearn/models/copernicusfm_src/model_vit.py,sha256=3coM_xYILlFY2TJiACmQBSe2z16jSG80SVEad_3uB3Q,11396
81
- rslearn/models/copernicusfm_src/aurora/area.py,sha256=ssg9aXgoZktOsFcEXDEY9670aPUN_PHfCOfDMtpsz1s,1711
82
- rslearn/models/copernicusfm_src/aurora/fourier.py,sha256=bmoNV3P6CH8R6W2GFuVW8zT_frQVaL-PAgpN3aFS5fA,4414
83
- rslearn/models/copernicusfm_src/flexivit/patch_embed.py,sha256=EQgbsHBXDq0dTM9kApmmIqd5ZV2X9CPuA_AytbE51uM,9363
84
- rslearn/models/copernicusfm_src/flexivit/utils.py,sha256=tLBlzgT5bpwMSvyir46bPRWsMmRKh8s7VwMNuvSatGo,2192
85
- rslearn/models/copernicusfm_src/util/pos_embed.py,sha256=dUYuM_Nch2LB8jQ7UDTmFj36KWe4mM9bsY6dv5m_yZI,8511
86
78
  rslearn/models/detr/__init__.py,sha256=GGAnTIhyuvl34IRrJ_4gXjm_01OlM5rbQQ3c3TGfbK8,84
87
79
  rslearn/models/detr/box_ops.py,sha256=ORCF6EwMpMBB_VgQT05SjR47dCR2rN2gPhL_gsuUWJs,3236
88
80
  rslearn/models/detr/detr.py,sha256=otLmmyUm05e4MUyvQBoqo-RKnx3hbodTXvfPQWvuTEI,18737
@@ -94,7 +86,7 @@ rslearn/models/galileo/__init__.py,sha256=QQa0C29nuPRva0KtGiMHQ2ZB02n9SSwj_wqTKP
94
86
  rslearn/models/galileo/galileo.py,sha256=jUHA64YvVC3Fz5fevc_9dFJfZaINODRDrhSGLIiOZcw,21115
95
87
  rslearn/models/galileo/single_file_galileo.py,sha256=l5tlmmdr2eieHNH-M7rVIvcptkv0Fuk3vKXFW691ezA,56143
96
88
  rslearn/models/olmoearth_pretrain/__init__.py,sha256=AjRvbjBdadCdPh-EdvySH76sVAQ8NGQaJt11Tsn1D5I,36
97
- rslearn/models/olmoearth_pretrain/model.py,sha256=F-B1ym9UZuTPJ0OY15Jwb1TkNtr_EtAUlqI-tr_Z2uo,8352
89
+ rslearn/models/olmoearth_pretrain/model.py,sha256=ZJgoyy7vwB0PUMJtHF-sdJ-uSBqnUXMDBco0Dx4cAes,10670
98
90
  rslearn/models/olmoearth_pretrain/norm.py,sha256=rHjFyWkpNLYMx9Ow7TsU-jGm9Sjx7FVf0p4R__ohx2c,3266
99
91
  rslearn/models/panopticon_data/sensors/drone.yaml,sha256=xqWS-_QMtJyRoWXJm-igoSur9hAmCFdqkPin8DT5qpw,431
100
92
  rslearn/models/panopticon_data/sensors/enmap.yaml,sha256=b2j6bSgYR2yKR9DRm3SPIzSVYlHf51ny_p-1B4B9sB4,13431
@@ -117,9 +109,9 @@ rslearn/tile_stores/tile_store.py,sha256=9AeYduDYPp_Ia2NMlq6osptpz_AFGIOQcLJrqZ_
117
109
  rslearn/train/__init__.py,sha256=fnJyY4aHs5zQqbDKSfXsJZXY_M9fbTsf7dRYaPwZr2M,30
118
110
  rslearn/train/data_module.py,sha256=K-nQgnOZn-KGq_G2pVOQFtWRrlWih0212i_bkXZ2bEE,23515
119
111
  rslearn/train/dataset.py,sha256=YiskNlYYcKqZxyw0Xzop1RGLbjMc-oK_rmhrSMVbTQg,51857
120
- rslearn/train/lightning_module.py,sha256=ge2z8trU7cMvxBeqUXC1tB44pftzitw7DRsIa6asBS4,14623
112
+ rslearn/train/lightning_module.py,sha256=ZLBiId3secUlVs2yzkN-mwVv4rMdh5TkdZYl4vv_Cw0,14466
121
113
  rslearn/train/optimizer.py,sha256=EKSqkmERalDA0bF32Gey7n6z69KLyaUWKlRsGJfKBmE,927
122
- rslearn/train/prediction_writer.py,sha256=YNs92QqPrqbREZXoE-aPa_oKQW0C9LvZAY129vyvI08,13288
114
+ rslearn/train/prediction_writer.py,sha256=mDvREwEB5k5_tNuBnYIvAGnxS3sYFWQYvV07V3UEe2k,14106
123
115
  rslearn/train/scheduler.py,sha256=wFbmycMHgL6nRYeYalDjb0G8YVo8VD3T3sABS61jJ7c,2318
124
116
  rslearn/train/callbacks/__init__.py,sha256=VNV0ArZyYMvl3dGK2wl6F046khYJ1dEBlJS6G_SYNm0,47
125
117
  rslearn/train/callbacks/adapters.py,sha256=yfv8nyCj3jmo2_dNkFrjukKxh0MHsf2xKqWwMF0QUtY,1869
@@ -127,11 +119,12 @@ rslearn/train/callbacks/freeze_unfreeze.py,sha256=8fIzBMhCKKjpTffIeAdhdSjsBd8NjT
127
119
  rslearn/train/callbacks/gradients.py,sha256=4YqCf0tBb6E5FnyFYbveXfQFlgNPyxIXb2FCWX4-6qs,5075
128
120
  rslearn/train/callbacks/peft.py,sha256=wEOKsS3RhsRaZTXn_Kz2wdsZdIiIaZPdCJWtdJBurT8,4156
129
121
  rslearn/train/tasks/__init__.py,sha256=dag1u72x1-me6y0YcOubUo5MYZ0Tjf6-dOir9UeFNMs,75
130
- rslearn/train/tasks/classification.py,sha256=DI0_Wzs-9rNPWokvfxi1BIA6QyqNee42SpptQx82WHM,13182
131
- rslearn/train/tasks/detection.py,sha256=OoZzC8ZbmhyZ30tD-4cB-3Jj0AN6Y7hg0wk27rDguCE,22297
122
+ rslearn/train/tasks/classification.py,sha256=kahVdXPU6fDwDCdqlrjZGb9uA-PYG74DbQQ0kJUt-Eg,13186
123
+ rslearn/train/tasks/detection.py,sha256=9j9webusrjGexvUmZ7gl3NTBS63Qq511VFlB2WbLi5Y,22302
124
+ rslearn/train/tasks/embedding.py,sha256=DK3l1aQ3d5gQUT1h3cD6vcUaNKvSsH26RHx2Bbzutbg,3667
132
125
  rslearn/train/tasks/multi_task.py,sha256=dBWsnbvQ0CReNsbDHmZ_-sXjUE0H4S2OPcbJwMquG9g,6016
133
- rslearn/train/tasks/per_pixel_regression.py,sha256=tkVntKFzPlWFxdupPlMfhIRWlJ0UCgxg_FGhcA2-wjE,8649
134
- rslearn/train/tasks/regression.py,sha256=_PoxOfWNseujD4IWsuTL82fAAXgtco4WdfkNXQ68Nbg,11497
126
+ rslearn/train/tasks/per_pixel_regression.py,sha256=W8dbLyIiPgFI3gA_aZQX0pSFRWLP2v6tthsFbKhcDVg,8783
127
+ rslearn/train/tasks/regression.py,sha256=zZhrrZ1qxjrdLjKWC9McRivDXCcKiYfdLC-kaMeVkDc,11547
135
128
  rslearn/train/tasks/segmentation.py,sha256=xEni3CLDyetviv84XrpJg5xeJU87WHGFKTVfIeemGIY,21868
136
129
  rslearn/train/tasks/task.py,sha256=4w2xKL_U5JAtdj2dYoVv82h6xTtgUsA3IvIOcXyZecs,3887
137
130
  rslearn/train/transforms/__init__.py,sha256=BkCAzm4f-8TEhPIuyvCj7eJGh36aMkZFYlq-H_jkSvY,778
@@ -140,7 +133,7 @@ rslearn/train/transforms/crop.py,sha256=4jA3JJsC0ghicPHbfsNJ0d3WpChyvftY73ONiwQa
140
133
  rslearn/train/transforms/flip.py,sha256=lkTeje3T8gNn2gt6957morXq1fGNho-apSpCvNp0_9o,3480
141
134
  rslearn/train/transforms/mask.py,sha256=pwt33XXWLwldLiar-PgVgBQzQd1qfL18SPz3LYQMoYM,2111
142
135
  rslearn/train/transforms/normalize.py,sha256=uyv2hE5hw5B2kCRHa4JIx0tfowm-C7bgumwINvvfyts,5014
143
- rslearn/train/transforms/pad.py,sha256=EDswS9KYRSloM3DQlbCz6S0WYqFQJvI433qMqTtqrZw,4686
136
+ rslearn/train/transforms/pad.py,sha256=pj4Ql8GSRrhg8KOZTNPB40Qq8CoCCHdGo04uficik84,4698
144
137
  rslearn/train/transforms/select_bands.py,sha256=uDfD9G8Z4VTt88QZsjj1FB20QEmzSefhKf7uDXYn77M,2441
145
138
  rslearn/train/transforms/sentinel1.py,sha256=FrLaYZs2AjqWQCun8DTFtgo1l0xLxqaFKtDNIehtpDg,1913
146
139
  rslearn/train/transforms/transform.py,sha256=n1Qzqix2dVvej-Q7iPzHeOQbqH79IBlvqPoymxhNVpE,4446
@@ -153,15 +146,16 @@ rslearn/utils/get_utm_ups_crs.py,sha256=kUrcyjCK7KWvuP1XR-nURPeRqYeRO-3L8QUJ1QTF
153
146
  rslearn/utils/grid_index.py,sha256=hRmrtgpqN1pLa-djnZtgSXqKJlbgGyttGnCEmPLD0zo,2347
154
147
  rslearn/utils/jsonargparse.py,sha256=JcTKQoZ6jgwag-kSeTIEVBO9AsRj0X1oEJBsoaCazH4,658
155
148
  rslearn/utils/mp.py,sha256=XYmVckI5TOQuCKc49NJyirDJyFgvb4AI-gGypG2j680,1399
156
- rslearn/utils/raster_format.py,sha256=dBTSa8l6Ms9Ndbx9Krgqm9z4RU7j2hwLBkw2w-KibU4,26009
149
+ rslearn/utils/raster_format.py,sha256=RDzDPnWUJunqcj-F4oXKBl-rKFBUpRjvq7mMYhid3iU,27413
157
150
  rslearn/utils/rtree_index.py,sha256=j0Zwrq3pXuAJ-hKpiRFQ7VNtvO3fZYk-Em2uBPAqfx4,6460
158
151
  rslearn/utils/spatial_index.py,sha256=eomJAUgzmjir8j9HZnSgQoJHwN9H0wGTjmJkMkLLfsU,762
159
152
  rslearn/utils/sqlite_index.py,sha256=YGOJi66544e6JNtfSft6YIlHklFdSJO2duxQ4TJ2iu4,2920
160
153
  rslearn/utils/time.py,sha256=2ilSLG94_sxLP3y5RSV5L5CG8CoND_dbdzYEHVtN-I8,387
161
154
  rslearn/utils/vector_format.py,sha256=EIChYCL6GLOILS2TO2JBkca1TuaWsSubWv6iRS3P2ds,16139
162
- rslearn-0.0.11.dist-info/licenses/LICENSE,sha256=_99ZWPoLdlUbqZoSC5DF4ihiNwl5rTEmBaq2fACecdg,11352
163
- rslearn-0.0.11.dist-info/METADATA,sha256=jwB0ZZ-oLa1Y_1iuZRKCQoB4i3kOFDJ0xSeMTJP7zww,36297
164
- rslearn-0.0.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
165
- rslearn-0.0.11.dist-info/entry_points.txt,sha256=doTBQ57NT7nq-dgYGgTTw6mafcGWb_4PWYtYR4rGm50,46
166
- rslearn-0.0.11.dist-info/top_level.txt,sha256=XDKo90WBH8P9RQumHxo0giLJsoufT4r9odv-WE6Ahk4,8
167
- rslearn-0.0.11.dist-info/RECORD,,
155
+ rslearn-0.0.13.dist-info/licenses/LICENSE,sha256=_99ZWPoLdlUbqZoSC5DF4ihiNwl5rTEmBaq2fACecdg,11352
156
+ rslearn-0.0.13.dist-info/licenses/NOTICE,sha256=wLPr6rwV_jCg-xEknNGwhnkfRfuoOE9MZ-lru2yZyLI,5070
157
+ rslearn-0.0.13.dist-info/METADATA,sha256=44oDmbvkIrjJ0unVNaYeO5OypD6RavmG7l5HUz9Re48,36319
158
+ rslearn-0.0.13.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
159
+ rslearn-0.0.13.dist-info/entry_points.txt,sha256=doTBQ57NT7nq-dgYGgTTw6mafcGWb_4PWYtYR4rGm50,46
160
+ rslearn-0.0.13.dist-info/top_level.txt,sha256=XDKo90WBH8P9RQumHxo0giLJsoufT4r9odv-WE6Ahk4,8
161
+ rslearn-0.0.13.dist-info/RECORD,,
@@ -0,0 +1,115 @@
1
+ rslearn is released under Apache License 2.0
2
+ Copyright 2025 Allen Institute for AI
3
+
4
+ The following third party code is included in this repository.
5
+
6
+ ====================
7
+
8
+ rslearn.models.detr is adapted from https://github.com/facebookresearch/detr which is
9
+ released under Apache License 2.0.
10
+
11
+ Copyright 2020 - present, Facebook, Inc
12
+
13
+ ====================
14
+
15
+ rslearn.models.use_croma is copied from https://github.com/antofuller/CROMA
16
+
17
+ MIT License
18
+
19
+ Copyright (c) 2023 Anthony Fuller
20
+
21
+ Permission is hereby granted, free of charge, to any person obtaining a copy
22
+ of this software and associated documentation files (the "Software"), to deal
23
+ in the Software without restriction, including without limitation the rights
24
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
25
+ copies of the Software, and to permit persons to whom the Software is
26
+ furnished to do so, subject to the following conditions:
27
+
28
+ The above copyright notice and this permission notice shall be included in all
29
+ copies or substantial portions of the Software.
30
+
31
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
32
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
33
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
34
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
35
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
36
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
37
+ SOFTWARE.
38
+
39
+ ====================
40
+
41
+ rslearn.models.galileo is adapted from https://github.com/nasaharvest/galileo
42
+
43
+ MIT License
44
+
45
+ Copyright (c) 2024 Presto Authors
46
+
47
+ Permission is hereby granted, free of charge, to any person obtaining a copy
48
+ of this software and associated documentation files (the "Software"), to deal
49
+ in the Software without restriction, including without limitation the rights
50
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
51
+ copies of the Software, and to permit persons to whom the Software is
52
+ furnished to do so, subject to the following conditions:
53
+
54
+ The above copyright notice and this permission notice shall be included in all
55
+ copies or substantial portions of the Software.
56
+
57
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
58
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
59
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
60
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
61
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
62
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
63
+ SOFTWARE.
64
+
65
+ ====================
66
+
67
+ rslearn.models.presto is adapted from https://github.com/nasaharvest/presto
68
+
69
+ MIT License
70
+
71
+ Copyright (c) 2024 Presto Authors
72
+
73
+ Permission is hereby granted, free of charge, to any person obtaining a copy
74
+ of this software and associated documentation files (the "Software"), to deal
75
+ in the Software without restriction, including without limitation the rights
76
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
77
+ copies of the Software, and to permit persons to whom the Software is
78
+ furnished to do so, subject to the following conditions:
79
+
80
+ The above copyright notice and this permission notice shall be included in all
81
+ copies or substantial portions of the Software.
82
+
83
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
84
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
85
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
86
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
87
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
88
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
89
+ SOFTWARE.
90
+
91
+ ====================
92
+
93
+ rslearn.models.prithvi includes code adapted from https://github.com/NASA-IMPACT/Prithvi-WxC
94
+
95
+ MIT License
96
+
97
+ Copyright (c) 2024 Inter Agency Implementation and Advanced Concepts
98
+
99
+ Permission is hereby granted, free of charge, to any person obtaining a copy
100
+ of this software and associated documentation files (the "Software"), to deal
101
+ in the Software without restriction, including without limitation the rights
102
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
103
+ copies of the Software, and to permit persons to whom the Software is
104
+ furnished to do so, subject to the following conditions:
105
+
106
+ The above copyright notice and this permission notice shall be included in all
107
+ copies or substantial portions of the Software.
108
+
109
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
110
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
111
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
112
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
113
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
114
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
115
+ SOFTWARE.