diff --git a/README.md b/README.md
index b340113..9d8e853 100644
--- a/README.md
+++ b/README.md
@@ -41,7 +41,6 @@ This repo holds the object detector and feature extractor for running things.
 > [!IMPORTANT]
 > If you have questions or find bugs or anything, you can contact us in our [organisation's discussion](https://github.com/orgs/emma-heriot-watt/discussions).
 
-
 ## Writing code and running things
 
 ### Run the server for the [Alexa Arena](https://github.com/amazon-science/alexa-arena)
diff --git a/pyproject.toml b/pyproject.toml
index 98df7ae..dd8bbb6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -39,11 +39,6 @@ cmd = "pytest -v --junitxml=pytest.xml --cov=src -m 'not slow and not multiproce
 help = "Update torch to use the latest CUDA version"
 shell = "python scripts/update_torch_cuda.py"
 
-[[tool.poe.tasks.postinstall]]
-help = 'Update torch and install maskrcnn-benchmark and scene-graph-benchmark'
-shell = """
-    pip install git+https://github.com/emma-simbot/scene_graph_benchmark
-    """
 
 [tool.poetry.dependencies]
 python = ">=3.9,<3.10"
diff --git a/src/emma_perception/models/simbot_entity_classifier.py b/src/emma_perception/models/simbot_entity_classifier.py
index 741e5be..b3ab5b2 100644
--- a/src/emma_perception/models/simbot_entity_classifier.py
+++ b/src/emma_perception/models/simbot_entity_classifier.py
@@ -177,12 +177,12 @@ def __init__(
 
     def make_layers(self) -> Sequential:
         """Make a simple 2 layer MLP."""
-        layers = []
+        layers: list[torch.nn.Module] = []
 
-        layers.append(Linear(self._in_features, self._hidden_dim))  # type: ignore[arg-type]
-        layers.append(BatchNorm1d(self._hidden_dim))  # type: ignore[arg-type]
-        layers.append(Dropout(self._dropout))  # type: ignore[arg-type]
-        layers.append(Linear(self._hidden_dim, self._num_classes))  # type: ignore[arg-type]
+        layers.append(Linear(self._in_features, self._hidden_dim))
+        layers.append(BatchNorm1d(self._hidden_dim))
+        layers.append(Dropout(self._dropout))
+        layers.append(Linear(self._hidden_dim, self._num_classes))
 
         return Sequential(*layers)
 