# Source code for torchgeo.datamodules.deepglobelandcover
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""DeepGlobe Land Cover Classification Challenge datamodule."""
from typing import Any, Dict, Optional
import pytorch_lightning as pl
from torch.utils.data import DataLoader, Dataset
from torchvision.transforms import Compose
from ..datasets import DeepGlobeLandCover
from .utils import dataset_split
class DeepGlobeLandCoverDataModule(pl.LightningDataModule):
    """LightningDataModule implementation for the DeepGlobe Land Cover dataset.

    Uses the train/test splits from the dataset. The validation set is carved
    out of the official train split according to ``val_split_pct``.
    """

    def __init__(
        self,
        root_dir: str,
        batch_size: int = 64,
        num_workers: int = 0,
        val_split_pct: float = 0.2,
        **kwargs: Any,
    ) -> None:
        """Initialize a LightningDataModule for DeepGlobe Land Cover based DataLoaders.

        Args:
            root_dir: The ``root`` argument to pass to the DeepGlobe Dataset classes
            batch_size: The batch size to use in all created DataLoaders
            num_workers: The number of workers to use in all created DataLoaders
            val_split_pct: What percentage of the dataset to use as a validation set
        """
        super().__init__()
        self.root_dir = root_dir
        self.batch_size = batch_size
        self.num_workers = num_workers
        self.val_split_pct = val_split_pct

    def preprocess(self, sample: Dict[str, Any]) -> Dict[str, Any]:
        """Transform a single sample from the Dataset.

        Args:
            sample: input image dictionary

        Returns:
            preprocessed sample
        """
        # Scale uint8 imagery in [0, 255] to float values in [0.0, 1.0].
        sample["image"] = sample["image"].float()
        sample["image"] /= 255.0
        return sample

    def setup(self, stage: Optional[str] = None) -> None:
        """Initialize the main ``Dataset`` objects.

        This method is called once per GPU per run.

        Args:
            stage: stage to set up
        """
        transforms = Compose([self.preprocess])
        dataset = DeepGlobeLandCover(self.root_dir, "train", transforms=transforms)

        self.train_dataset: Dataset[Any]
        self.val_dataset: Dataset[Any]

        if self.val_split_pct > 0.0:
            # Carve a validation subset out of the official train split;
            # test_pct=0.0 means the third return value is an empty split.
            self.train_dataset, self.val_dataset, _ = dataset_split(
                dataset, val_pct=self.val_split_pct, test_pct=0.0
            )
        else:
            # No held-out validation requested: validate on the full train set.
            self.train_dataset = dataset
            self.val_dataset = dataset

        self.test_dataset = DeepGlobeLandCover(
            self.root_dir, "test", transforms=transforms
        )

    def train_dataloader(self) -> DataLoader[Dict[str, Any]]:
        """Return a DataLoader for training.

        Returns:
            training data loader
        """
        return DataLoader(
            self.train_dataset,
            batch_size=self.batch_size,
            num_workers=self.num_workers,
            shuffle=True,
        )

    def val_dataloader(self) -> DataLoader[Dict[str, Any]]:
        """Return a DataLoader for validation.

        Returns:
            validation data loader
        """
        return DataLoader(
            self.val_dataset,
            batch_size=self.batch_size,
            num_workers=self.num_workers,
            shuffle=False,
        )

    def test_dataloader(self) -> DataLoader[Dict[str, Any]]:
        """Return a DataLoader for testing.

        Returns:
            testing data loader
        """
        return DataLoader(
            self.test_dataset,
            batch_size=self.batch_size,
            num_workers=self.num_workers,
            shuffle=False,
        )