From 83267b9cead202be88b5d97dfbe7b854201cf581 Mon Sep 17 00:00:00 2001
From: sophmrtn <44570734+sophmrtn@users.noreply.github.com>
Date: Wed, 3 Jul 2024 12:00:40 +0100
Subject: [PATCH] Fix embedding error for models with no notes

---
 src/models.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/models.py b/src/models.py
index cf7e296..6b1bfc7 100644
--- a/src/models.py
+++ b/src/models.py
@@ -24,7 +24,7 @@ def forward(self, input_):
 
 
 class Gate(nn.Module):
-    # Adapted from https://github.com/emnlp-mimic/mimic/blob/main/base.py#L136 inspired by https://ieeexplore.ieee.org/document/9746536
+    # Adapted from https://github.com/emnlp-mimic/mimic/blob/main/base.py#L136 inspired by https://arxiv.org/pdf/1908.05787
     def __init__(self, inp1_size, inp2_size, inp3_size: int = 0, dropout: int = 0):
         super().__init__()
 
@@ -105,7 +105,7 @@ def __init__(
             self.embed_notes = nn.Linear(nt_input_dim, nt_embed_dim)
         else:
             self.embed_notes = None
-            self.nt_embed_dim = 0
+            nt_embed_dim = 0
 
         if self.fusion_method == "mag":
             if self.st_first:
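
Note on the second hunk: the change reads as if code further down in __init__ sizes its layers from the local variable nt_embed_dim rather than from self.nt_embed_dim, so zeroing only the attribute left the local at its nonzero default when no notes are provided and the fused dimensions came out wrong. A minimal sketch of that pattern, using hypothetical names (FusionModel, st_embed_dim, self.fuse) that are not taken from the repository:

    import torch.nn as nn

    class FusionModel(nn.Module):
        def __init__(self, st_embed_dim: int, nt_input_dim: int = 0, nt_embed_dim: int = 64):
            super().__init__()
            if nt_input_dim > 0:
                self.embed_notes = nn.Linear(nt_input_dim, nt_embed_dim)
            else:
                self.embed_notes = None
                # Zero the LOCAL variable so the sizing below excludes the notes branch;
                # setting only self.nt_embed_dim = 0 would leave the local at 64 and
                # build a mismatched fusion layer when no notes are supplied.
                nt_embed_dim = 0
            # Assumed downstream use: the fused projection is sized from the local variable.
            self.fuse = nn.Linear(st_embed_dim + nt_embed_dim, st_embed_dim)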