Skip to content

Commit

Permalink
Revert unwanted merge (aramis-lab#672)
Browse files · Browse the repository at this point in the history
  • Loading branch information
thibaultdvx authored and camillebrianceau committed Nov 7, 2024
1 parent 8e122ab commit 255cb22
Show file tree
Hide file tree
Showing 3 changed files with 19 additions and 9 deletions.
4 changes: 2 additions & 2 deletions clinicadl/dataset/caps_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -580,11 +580,11 @@ def _get_mask_paths_and_tensors(
else:
for template_ in Template:
if preprocessing_.name == template_.name:
template_name = template_.value
template_name = template_

for pattern_ in Pattern:
if preprocessing_.name == pattern_.name:
pattern = pattern_.value
pattern = pattern_

mask_location = caps_directory / "masks" / f"tpl-{template_name}"

Expand Down
2 changes: 1 addition & 1 deletion clinicadl/networks/old_network/cnn/random.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,7 @@ def fc_dict_design(n_fcblocks, convolutions, initial_shape, n_classes=2):
out_channels = last_conv["out_channels"]
flattened_shape = np.ceil(np.array(initial_shape) / 2**n_conv)
flattened_shape[0] = out_channels
in_features = np.prod(flattened_shape)
in_features = np.product(flattened_shape)

# Sample number of FC layers
ratio = (in_features / n_classes) ** (1 / n_fcblocks)
Expand Down
22 changes: 16 additions & 6 deletions clinicadl/trainer/tasks_utils.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,31 @@
from abc import abstractmethod
from typing import Any, Dict, List, Optional, Sequence, Tuple, Type, Union

import numpy as np
import pandas as pd
import torch
import torch.distributed as dist
from pydantic import (
BaseModel,
ConfigDict,
computed_field,
model_validator,
)
from torch import Tensor, nn
from torch.amp import autocast
from torch.nn.functional import softmax
from torch.utils.data import Sampler, sampler
from torch.nn.modules.loss import _Loss
from torch.utils.data import DataLoader, Sampler, sampler
from torch.utils.data.distributed import DistributedSampler

from clinicadl.dataset.caps_dataset import CapsDataset
from clinicadl.metrics.old_metrics.metric_module import MetricModule
from clinicadl.networks.old_network.network import Network
from clinicadl.trainer.config.train import TrainConfig
from clinicadl.utils import cluster
from clinicadl.utils.enum import (
ClassificationLoss,
ClassificationMetric,
Mode,
ReconstructionLoss,
ReconstructionMetric,
RegressionLoss,
Expand Down Expand Up @@ -239,7 +249,7 @@ def save_outputs(network_task: Union[str, Task]):

def generate_test_row(
network_task: Union[str, Task],
mode: Mode,
mode: str,
metrics_module,
n_classes: int,
idx: int,
Expand All @@ -264,7 +274,7 @@ def generate_test_row(
[
data["participant_id"][idx],
data["session_id"][idx],
data[f"{mode.value}_id"][idx].item(),
data[f"{mode}_id"][idx].item(),
data["label"][idx].item(),
prediction,
]
Expand All @@ -276,7 +286,7 @@ def generate_test_row(
[
data["participant_id"][idx],
data["session_id"][idx],
data[f"{mode.value}_id"][idx].item(),
data[f"{mode}_id"][idx].item(),
data["label"][idx].item(),
outputs[idx].item(),
]
Expand All @@ -288,7 +298,7 @@ def generate_test_row(
row = [
data["participant_id"][idx],
data["session_id"][idx],
data[f"{mode.value}_id"][idx].item(),
data[f"{mode}_id"][idx].item(),
]

for metric in evaluation_metrics(Task.RECONSTRUCTION):
Expand Down

0 comments on commit 255cb22

Please sign in to comment.