diff --git a/src/eva/data/datamodules/datamodule.py b/src/eva/data/datamodules/datamodule.py
index d4873d10..37d3b23f 100644
--- a/src/eva/data/datamodules/datamodule.py
+++ b/src/eva/data/datamodules/datamodule.py
@@ -27,8 +27,8 @@ def __init__(
         """Initializes the datamodule.
 
         Args:
-            datasets: The desired datasets. Defaults to `None`.
-            dataloaders: The desired dataloaders. Defaults to `None`.
+            datasets: The desired datasets.
+            dataloaders: The desired dataloaders.
         """
         super().__init__()
diff --git a/src/eva/metrics/core/module.py b/src/eva/metrics/core/module.py
index d9c69b6b..ea7db5fa 100644
--- a/src/eva/metrics/core/module.py
+++ b/src/eva/metrics/core/module.py
@@ -53,7 +53,7 @@ def from_metrics(
             val: Metrics for the validation stage.
             test: Metrics for the test stage.
             separator: The separator between the group name of the metric
-                and the metric itself. Defaults to `"/"`.
+                and the metric itself.
         """
         return cls(
             train=_create_collection_from_metrics(train, prefix="train" + separator),
@@ -73,7 +73,7 @@ def from_schema(
         Args:
             schema: The dataclass metric schema.
             separator: The separator between the group name of the metric
-                and the metric itself. Defaults to `"/"`.
+                and the metric itself.
         """
         return cls.from_metrics(
             train=schema.training_metrics,
@@ -105,7 +105,7 @@ def _create_collection_from_metrics(
 
     Args:
         metrics: The desired metrics.
-        prefix: A prefix to added to the collection. Defaults to `None`.
+        prefix: A prefix to added to the collection.
 
     Returns:
         The resulted metrics collection.
diff --git a/src/eva/models/modules/head.py b/src/eva/models/modules/head.py
index 4eb6906a..d6f5036a 100644
--- a/src/eva/models/modules/head.py
+++ b/src/eva/models/modules/head.py
@@ -40,13 +40,10 @@ def __init__(
            criterion: The loss function to use.
            backbone: The feature extractor. If `None`, it will be expected
                that the input batch returns the features directly.
-               Defaults to `None`.
            optimizer: The optimizer to use.
-               Defaults to :class:`torch.optim.Adam`.
            lr_scheduler: The learning rate scheduler to use.
-               Defaults to :class:`torch.optim.lr_scheduler.ConstantLR`.
            metrics: The list of metrics to track. If `None`, it uses
-               the :meth:`self.default_metrics`. Defaults to `None`.
+               the :meth:`self.default_metrics`.
         """
         super().__init__(metrics=metrics)
diff --git a/src/eva/models/modules/module.py b/src/eva/models/modules/module.py
index c1e48bac..7041c7ef 100644
--- a/src/eva/models/modules/module.py
+++ b/src/eva/models/modules/module.py
@@ -23,7 +23,7 @@ def __init__(
         """Initializes the basic module.
 
         Args:
-            metrics: The metrics schema. Defaults to `None`.
+            metrics: The metrics schema.
         """
         super().__init__()
@@ -131,7 +131,7 @@ def _update_metrics(
        Args:
            metrics: The desired metrics tracker to update.
            batch_outputs: The outputs of the batch processing step.
-           dataloader_idx: The dataloader index. Defaults to `0`.
+           dataloader_idx: The dataloader index.
        """
        inputs = self._parse_metrics_inputs(batch_outputs, dataloader_idx)
        metrics.update(**inputs)
@@ -159,7 +159,7 @@ def _parse_metrics_inputs(
 
        Args:
            batch_outputs: The outputs of the batch processing step.
-           dataloader_idx: The dataloader index. Defaults to `0`.
+           dataloader_idx: The dataloader index.
 
        Returns:
            A mapping with the argument name and its value.
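
Note: the `separator` documented above joins a stage prefix with each metric name (the diff shows `prefix="train" + separator` being passed to `_create_collection_from_metrics`). A minimal sketch of that prefixing behaviour, assuming a standard `torchmetrics` collection; the metric choice and the sample tensors are purely illustrative and not part of this change:

import torch
import torchmetrics

separator = "/"

# Mirrors the documented pattern: the collection reports every metric
# under a "train/" key because of the stage prefix plus separator.
collection = torchmetrics.MetricCollection(
    [torchmetrics.classification.MulticlassAccuracy(num_classes=3)],
    prefix="train" + separator,
)

preds = torch.tensor([0, 2, 1, 2])
target = torch.tensor([0, 1, 1, 2])
collection.update(preds, target)

# Keys come out prefixed with the stage name,
# e.g. {"train/MulticlassAccuracy": ...}
print(collection.compute())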