Skip to content

Commit 926fdf4

Browse files
author
Hitesh Tolani
committed
Fixed tests directory
1 parent a0c9fe5 commit 926fdf4

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

79 files changed

+440
-194
lines changed

tests/conf/ssl4eo_l_moco_1.yaml

+6
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,12 @@ model:
88
temperature: 0.07
99
memory_bank_size: 10
1010
moco_momentum: 0.999
11+
augmentation1:
12+
class_path: kornia.augmentation.RandomResizedCrop
13+
init_args:
14+
size:
15+
- 224
16+
- 224
1117
data:
1218
class_path: SSL4EOLDataModule
1319
init_args:
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
{"type": "FeatureCollection", "name": "Aboveground_Live_Woody_Biomass_Density", "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, "features": [{"type": "Feature", "properties": {"tile_id": "00N_000E", "download": "tests/data/agb_live_woody_density/00N_000E.tif", "ObjectId": 1, "Shape__Area": 1245542622548.87, "Shape__Length": 4464169.76558139}, "geometry": {"type": "Polygon", "coordinates": [[[0.0, 0.0], [10.0, 0.0], [10.0, -10.0], [0.0, -10.0], [0.0, 0.0]]]}}]}
1+
{"type": "FeatureCollection", "name": "Aboveground_Live_Woody_Biomass_Density", "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, "features": [{"type": "Feature", "properties": {"tile_id": "00N_000E", "Mg_px_1_download": "tests/data/agb_live_woody_density/00N_000E.tif", "ObjectId": 1, "Shape__Area": 1245542622548.87, "Shape__Length": 4464169.76558139}, "geometry": {"type": "Polygon", "coordinates": [[[0.0, 0.0], [10.0, 0.0], [10.0, -10.0], [0.0, -10.0], [0.0, 0.0]]]}}]}

tests/data/agb_live_woody_density/data.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
"type": "Feature",
2424
"properties": {
2525
"tile_id": "00N_000E",
26-
"download": os.path.join(
26+
"Mg_px_1_download": os.path.join(
2727
"tests", "data", "agb_live_woody_density", "00N_000E.tif"
2828
),
2929
"ObjectId": 1,
@@ -74,5 +74,5 @@ def create_file(path: str, dtype: str, num_channels: int) -> None:
7474
json.dump(base_file, f)
7575

7676
for i in base_file["features"]:
77-
filepath = os.path.basename(i["properties"]["download"])
77+
filepath = os.path.basename(i["properties"]["Mg_px_1_download"])
7878
create_file(path=filepath, dtype="int32", num_channels=1)

tests/data/nccm/13090442.zip

2.97 KB
Binary file not shown.
853 Bytes
Binary file not shown.
854 Bytes
Binary file not shown.
857 Bytes
Binary file not shown.

tests/data/nccm/data.py

+67
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
#!/usr/bin/env python3

# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

"""Generate fake NCCM test data: three tiny GeoTIFFs zipped into ``13090442.zip``."""

import hashlib
import os
import shutil

import numpy as np
import rasterio
from rasterio.crs import CRS
from rasterio.transform import Affine

# Height and width (in pixels) of each generated raster.
SIZE = 32

# Fixed seed so the generated rasters (and therefore the zip checksum)
# are reproducible across runs.
np.random.seed(0)
files = ["CDL2017_clip.tif", "CDL2018_clip1.tif", "CDL2019_clip.tif"]


def create_file(path: str, dtype: str) -> None:
    """Write a SIZE x SIZE single-band GeoTIFF of random class values.

    Args:
        path: output file path.
        dtype: rasterio dtype string for the band (e.g. ``"int8"``).
    """
    profile = {
        "driver": "GTiff",
        "dtype": dtype,
        "count": 1,
        "crs": CRS.from_epsg(4326),
        "transform": Affine(
            8.983152841195208e-05,
            0.0,
            115.483402043364,
            0.0,
            -8.983152841195208e-05,
            53.531397320113605,
        ),
        "height": SIZE,
        "width": SIZE,
        "compress": "lzw",
        "predictor": 2,
    }

    # Pixel values drawn from the class codes the dataset is expected to
    # contain; presumably NCCM crop-classification labels — TODO confirm.
    allowed_values = [0, 1, 2, 3, 15]

    Z = np.random.choice(allowed_values, size=(SIZE, SIZE))

    with rasterio.open(path, "w", **profile) as src:
        src.write(Z, 1)


if __name__ == "__main__":
    # Renamed from `dir` to avoid shadowing the builtin.
    directory = os.path.join(os.getcwd(), "13090442")

    # Start from a clean slate so stale files don't end up in the archive.
    # os.path.isdir already implies existence, so a separate exists() check
    # is unnecessary.
    if os.path.isdir(directory):
        shutil.rmtree(directory)

    os.makedirs(directory, exist_ok=True)

    for file in files:
        create_file(os.path.join(directory, file), dtype="int8")

    # Compress data
    shutil.make_archive("13090442", "zip", ".", directory)

    # Compute checksums
    with open("13090442.zip", "rb") as f:
        md5 = hashlib.md5(f.read()).hexdigest()
        print(f"13090442.zip: {md5}")

tests/datasets/test_advance.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
from pytest import MonkeyPatch
1515

1616
import torchgeo.datasets.utils
17-
from torchgeo.datasets import ADVANCE
17+
from torchgeo.datasets import ADVANCE, DatasetNotFoundError
1818

1919

2020
def download_url(url: str, root: str, *args: str) -> None:
@@ -68,7 +68,7 @@ def test_already_downloaded(self, dataset: ADVANCE) -> None:
6868
ADVANCE(root=dataset.root, download=True)
6969

7070
def test_not_downloaded(self, tmp_path: Path) -> None:
71-
with pytest.raises(RuntimeError, match="Dataset not found or corrupted."):
71+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
7272
ADVANCE(str(tmp_path))
7373

7474
def test_mock_missing_module(

tests/datasets/test_agb_live_woody_density.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
import torchgeo
1616
from torchgeo.datasets import (
1717
AbovegroundLiveWoodyBiomassDensity,
18+
DatasetNotFoundError,
1819
IntersectionDataset,
1920
UnionDataset,
2021
)
@@ -53,7 +54,7 @@ def test_getitem(self, dataset: AbovegroundLiveWoodyBiomassDensity) -> None:
5354
assert isinstance(x["mask"], torch.Tensor)
5455

5556
def test_no_dataset(self, tmp_path: Path) -> None:
56-
with pytest.raises(RuntimeError, match="Dataset not found"):
57+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
5758
AbovegroundLiveWoodyBiomassDensity(str(tmp_path))
5859

5960
def test_already_downloaded(

tests/datasets/test_astergdem.py

+8-2
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,13 @@
1111
import torch.nn as nn
1212
from rasterio.crs import CRS
1313

14-
from torchgeo.datasets import AsterGDEM, BoundingBox, IntersectionDataset, UnionDataset
14+
from torchgeo.datasets import (
15+
AsterGDEM,
16+
BoundingBox,
17+
DatasetNotFoundError,
18+
IntersectionDataset,
19+
UnionDataset,
20+
)
1521

1622

1723
class TestAsterGDEM:
@@ -26,7 +32,7 @@ def dataset(self, tmp_path: Path) -> AsterGDEM:
2632
def test_datasetmissing(self, tmp_path: Path) -> None:
2733
shutil.rmtree(tmp_path)
2834
os.makedirs(tmp_path)
29-
with pytest.raises(RuntimeError, match="Dataset not found in"):
35+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
3036
AsterGDEM(str(tmp_path))
3137

3238
def test_getitem(self, dataset: AsterGDEM) -> None:

tests/datasets/test_benin_cashews.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
from pytest import MonkeyPatch
1414
from torch.utils.data import ConcatDataset
1515

16-
from torchgeo.datasets import BeninSmallHolderCashews
16+
from torchgeo.datasets import BeninSmallHolderCashews, DatasetNotFoundError
1717

1818

1919
class Collection:
@@ -73,7 +73,7 @@ def test_already_downloaded(self, dataset: BeninSmallHolderCashews) -> None:
7373
BeninSmallHolderCashews(root=dataset.root, download=True, api_key="")
7474

7575
def test_not_downloaded(self, tmp_path: Path) -> None:
76-
with pytest.raises(RuntimeError, match="Dataset not found or corrupted."):
76+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
7777
BeninSmallHolderCashews(str(tmp_path))
7878

7979
def test_invalid_bands(self) -> None:

tests/datasets/test_bigearthnet.py

+2-5
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
from pytest import MonkeyPatch
1414

1515
import torchgeo.datasets.utils
16-
from torchgeo.datasets import BigEarthNet
16+
from torchgeo.datasets import BigEarthNet, DatasetNotFoundError
1717

1818

1919
def download_url(url: str, root: str, *args: str, **kwargs: str) -> None:
@@ -134,10 +134,7 @@ def test_already_downloaded_not_extracted(
134134
)
135135

136136
def test_not_downloaded(self, tmp_path: Path) -> None:
137-
err = "Dataset not found in `root` directory and `download=False`, "
138-
"either specify a different `root` directory or use `download=True` "
139-
"to automatically download the dataset."
140-
with pytest.raises(RuntimeError, match=err):
137+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
141138
BigEarthNet(str(tmp_path))
142139

143140
def test_plot(self, dataset: BigEarthNet) -> None:

tests/datasets/test_biomassters.py

+2-3
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
import pytest
1111
from _pytest.fixtures import SubRequest
1212

13-
from torchgeo.datasets import BioMassters
13+
from torchgeo.datasets import BioMassters, DatasetNotFoundError
1414

1515

1616
class TestBioMassters:
@@ -36,8 +36,7 @@ def test_invalid_bands(self, dataset: BioMassters) -> None:
3636
BioMassters(dataset.root, sensors=["S3"])
3737

3838
def test_not_downloaded(self, tmp_path: Path) -> None:
39-
match = "Dataset not found"
40-
with pytest.raises(RuntimeError, match=match):
39+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
4140
BioMassters(str(tmp_path))
4241

4342
def test_plot(self, dataset: BioMassters) -> None:

tests/datasets/test_cbf.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
from torchgeo.datasets import (
1717
BoundingBox,
1818
CanadianBuildingFootprints,
19+
DatasetNotFoundError,
1920
IntersectionDataset,
2021
UnionDataset,
2122
)
@@ -75,7 +76,7 @@ def test_plot_prediction(self, dataset: CanadianBuildingFootprints) -> None:
7576
dataset.plot(x, suptitle="Prediction")
7677

7778
def test_not_downloaded(self, tmp_path: Path) -> None:
78-
with pytest.raises(RuntimeError, match="Dataset not found or corrupted."):
79+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
7980
CanadianBuildingFootprints(str(tmp_path))
8081

8182
def test_invalid_query(self, dataset: CanadianBuildingFootprints) -> None:

tests/datasets/test_cdl.py

+8-2
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,13 @@
1515
from rasterio.crs import CRS
1616

1717
import torchgeo.datasets.utils
18-
from torchgeo.datasets import CDL, BoundingBox, IntersectionDataset, UnionDataset
18+
from torchgeo.datasets import (
19+
CDL,
20+
BoundingBox,
21+
DatasetNotFoundError,
22+
IntersectionDataset,
23+
UnionDataset,
24+
)
1925

2026

2127
def download_url(url: str, root: str, *args: str, **kwargs: str) -> None:
@@ -111,7 +117,7 @@ def test_plot_prediction(self, dataset: CDL) -> None:
111117
plt.close()
112118

113119
def test_not_downloaded(self, tmp_path: Path) -> None:
114-
with pytest.raises(RuntimeError, match="Dataset not found"):
120+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
115121
CDL(str(tmp_path))
116122

117123
def test_invalid_query(self, dataset: CDL) -> None:

tests/datasets/test_chesapeake.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
BoundingBox,
1919
Chesapeake13,
2020
ChesapeakeCVPR,
21+
DatasetNotFoundError,
2122
IntersectionDataset,
2223
UnionDataset,
2324
)
@@ -70,7 +71,7 @@ def test_already_downloaded(self, tmp_path: Path) -> None:
7071
Chesapeake13(root)
7172

7273
def test_not_downloaded(self, tmp_path: Path) -> None:
73-
with pytest.raises(RuntimeError, match="Dataset not found"):
74+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
7475
Chesapeake13(str(tmp_path), checksum=True)
7576

7677
def test_plot(self, dataset: Chesapeake13) -> None:
@@ -193,7 +194,7 @@ def test_already_downloaded(self, tmp_path: Path) -> None:
193194
ChesapeakeCVPR(root)
194195

195196
def test_not_downloaded(self, tmp_path: Path) -> None:
196-
with pytest.raises(RuntimeError, match="Dataset not found"):
197+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
197198
ChesapeakeCVPR(str(tmp_path), checksum=True)
198199

199200
def test_out_of_bounds_query(self, dataset: ChesapeakeCVPR) -> None:

tests/datasets/test_cloud_cover.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
import torch.nn as nn
1313
from pytest import MonkeyPatch
1414

15-
from torchgeo.datasets import CloudCoverDetection
15+
from torchgeo.datasets import CloudCoverDetection, DatasetNotFoundError
1616

1717

1818
class Collection:
@@ -83,7 +83,7 @@ def test_already_downloaded(self, dataset: CloudCoverDetection) -> None:
8383
CloudCoverDetection(root=dataset.root, split="test", download=True, api_key="")
8484

8585
def test_not_downloaded(self, tmp_path: Path) -> None:
86-
with pytest.raises(RuntimeError, match="Dataset not found or corrupted."):
86+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
8787
CloudCoverDetection(str(tmp_path))
8888

8989
def test_plot(self, dataset: CloudCoverDetection) -> None:

tests/datasets/test_cms_mangrove_canopy.py

+7-2
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,12 @@
1212
from pytest import MonkeyPatch
1313
from rasterio.crs import CRS
1414

15-
from torchgeo.datasets import CMSGlobalMangroveCanopy, IntersectionDataset, UnionDataset
15+
from torchgeo.datasets import (
16+
CMSGlobalMangroveCanopy,
17+
DatasetNotFoundError,
18+
IntersectionDataset,
19+
UnionDataset,
20+
)
1621

1722

1823
def download_url(url: str, root: str, *args: str, **kwargs: str) -> None:
@@ -45,7 +50,7 @@ def test_getitem(self, dataset: CMSGlobalMangroveCanopy) -> None:
4550
assert isinstance(x["mask"], torch.Tensor)
4651

4752
def test_no_dataset(self, tmp_path: Path) -> None:
48-
with pytest.raises(RuntimeError, match="Dataset not found"):
53+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
4954
CMSGlobalMangroveCanopy(str(tmp_path))
5055

5156
def test_already_downloaded(self, tmp_path: Path) -> None:

tests/datasets/test_cowc.py

+3-4
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,7 @@
1414
from torch.utils.data import ConcatDataset
1515

1616
import torchgeo.datasets.utils
17-
from torchgeo.datasets import COWCCounting, COWCDetection
18-
from torchgeo.datasets.cowc import COWC
17+
from torchgeo.datasets import COWC, COWCCounting, COWCDetection, DatasetNotFoundError
1918

2019

2120
def download_url(url: str, root: str, *args: str, **kwargs: str) -> None:
@@ -78,7 +77,7 @@ def test_invalid_split(self) -> None:
7877
COWCCounting(split="foo")
7978

8079
def test_not_downloaded(self, tmp_path: Path) -> None:
81-
with pytest.raises(RuntimeError, match="Dataset not found or corrupted."):
80+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
8281
COWCCounting(str(tmp_path))
8382

8483
def test_plot(self, dataset: COWCCounting) -> None:
@@ -142,7 +141,7 @@ def test_invalid_split(self) -> None:
142141
COWCDetection(split="foo")
143142

144143
def test_not_downloaded(self, tmp_path: Path) -> None:
145-
with pytest.raises(RuntimeError, match="Dataset not found or corrupted."):
144+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
146145
COWCDetection(str(tmp_path))
147146

148147
def test_plot(self, dataset: COWCDetection) -> None:

tests/datasets/test_cv4a_kenya_crop_type.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
from pytest import MonkeyPatch
1414
from torch.utils.data import ConcatDataset
1515

16-
from torchgeo.datasets import CV4AKenyaCropType
16+
from torchgeo.datasets import CV4AKenyaCropType, DatasetNotFoundError
1717

1818

1919
class Collection:
@@ -84,7 +84,7 @@ def test_already_downloaded(self, dataset: CV4AKenyaCropType) -> None:
8484
CV4AKenyaCropType(root=dataset.root, download=True, api_key="")
8585

8686
def test_not_downloaded(self, tmp_path: Path) -> None:
87-
with pytest.raises(RuntimeError, match="Dataset not found or corrupted."):
87+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
8888
CV4AKenyaCropType(str(tmp_path))
8989

9090
def test_invalid_tile(self, dataset: CV4AKenyaCropType) -> None:

tests/datasets/test_cyclone.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
from pytest import MonkeyPatch
1515
from torch.utils.data import ConcatDataset
1616

17-
from torchgeo.datasets import TropicalCyclone
17+
from torchgeo.datasets import DatasetNotFoundError, TropicalCyclone
1818

1919

2020
class Collection:
@@ -80,7 +80,7 @@ def test_invalid_split(self) -> None:
8080
TropicalCyclone(split="foo")
8181

8282
def test_not_downloaded(self, tmp_path: Path) -> None:
83-
with pytest.raises(RuntimeError, match="Dataset not found or corrupted."):
83+
with pytest.raises(DatasetNotFoundError, match="Dataset not found"):
8484
TropicalCyclone(str(tmp_path))
8585

8686
def test_plot(self, dataset: TropicalCyclone) -> None:

0 commit comments

Comments
 (0)