Skip to content

Commit

Permalink
Tests
Browse files Browse the repository at this point in the history
  • Loading branch information
k1o0 committed Nov 14, 2024
1 parent 15b308d commit a8aa9ea
Show file tree
Hide file tree
Showing 7 changed files with 129 additions and 12 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,6 @@ this setup will work on other systems. Assumptions made are that you have sudo p
- `./manage.py test` test with migrations (recommended if model changes)
- NB: When running tests, ensure `DEBUG = True` in the settings.py file (otherwise `SECURE_SSL_REDIRECT = True` takes effect and causes the REST tests to fail)

```
$ /manage.py test -n
```shell
./manage.py test -n
```
7 changes: 7 additions & 0 deletions alyx/actions/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,13 @@ class SessionDatasetsSerializer(serializers.ModelSerializer):
default_revision = serializers.CharField(source='default_dataset')
qc = BaseSerializerEnumField(required=False)

def to_representation(self, instance):
    """Serialize the dataset, replacing a null revision with an empty string.

    Overrides the default ``to_representation`` so the ``revision`` field is
    never ``None`` in API responses: a missing revision is rendered as ``''``.
    (The previous docstring stated the opposite — that the field was nulled.)

    :param instance: the Dataset model instance being serialized.
    :return: dict of serialized fields with ``revision`` coerced to a string.
    """
    representation = super().to_representation(instance)
    if representation.get('revision') is None:
        representation['revision'] = ''
    return representation

class Meta:
list_serializer_class = FilterDatasetSerializer
model = Dataset
Expand Down
2 changes: 1 addition & 1 deletion alyx/data/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -386,7 +386,7 @@ def __str__(self):
def save(self, *args, **kwargs):
# when a dataset is saved / created make sure the probe insertion is set in the reverse m2m
super(Dataset, self).save(*args, **kwargs)
if self.collection is None:
if not self.collection:
return
self.clean_fields() # Validate collection field
from experiments.models import ProbeInsertion, FOV
Expand Down
11 changes: 9 additions & 2 deletions alyx/data/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ class DatasetSerializer(serializers.HyperlinkedModelSerializer):
hash = serializers.CharField(required=False, allow_null=True)
version = serializers.CharField(required=False, allow_null=True)
file_size = serializers.IntegerField(required=False, allow_null=True)
collection = serializers.CharField(required=False, allow_null=True)
collection = serializers.CharField(required=False, allow_blank=True, allow_null=True)
default_dataset = serializers.BooleanField(required=False, allow_null=True)
public = serializers.ReadOnlyField()
protected = serializers.ReadOnlyField()
Expand Down Expand Up @@ -178,7 +178,7 @@ def get_experiment_number(self, obj):
def create(self, validated_data):
# Get out some useful info
# revision = validated_data.get('revision', None)
collection = validated_data.get('collection', None)
collection = validated_data.get('collection', '')
name = validated_data.get('name', None)
default = validated_data.get('default_dataset', None)
session = validated_data.get('session', None)
Expand Down Expand Up @@ -213,6 +213,13 @@ def create(self, validated_data):

return super(DatasetSerializer, self).create(validated_data)

def to_representation(self, instance):
    """Serialize the dataset, replacing a null revision with an empty string.

    Overrides the default ``to_representation`` so the ``revision`` field is
    never ``None`` in API responses: a missing revision is rendered as ``''``.
    (The previous docstring stated the opposite — that the field was nulled.)

    :param instance: the Dataset model instance being serialized.
    :return: dict of serialized fields with ``revision`` coerced to a string.
    """
    representation = super().to_representation(instance)
    if representation.get('revision') is None:
        representation['revision'] = ''
    return representation

class Meta:
model = Dataset
fields = ('url', 'name', 'created_by', 'created_datetime',
Expand Down
108 changes: 106 additions & 2 deletions alyx/data/tests.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from unittest import mock
from pathlib import Path
from pathlib import PurePosixPath
from uuid import uuid4
from datetime import datetime, timedelta

Expand All @@ -8,12 +8,15 @@
from django.db import transaction
from django.db.utils import IntegrityError
from django.db.models import ProtectedError
from rest_framework.response import Response
from one.alf.path import add_uuid_string

from data.management.commands import files
from data.models import Dataset, DatasetType, Tag, Revision, DataRepository, FileRecord
from subjects.models import Subject
from actions.models import Session
from misc.models import Lab
from data import transfers
from data.transfers import get_dataset_type


Expand Down Expand Up @@ -201,4 +204,105 @@ def _new_delete_client(self, _, gid, **kwargs):

@staticmethod
def _dataset_uuid_name(dataset):
    """Return the dataset file name with its UUID inserted before the suffix.

    e.g. 'spikes.times.npy' -> 'spikes.times.<pk>.npy', matching how files
    are named on the main repository.

    NB: the diff left the superseded ``Path``-based return in place above the
    intended one, making the new implementation unreachable; only the
    ``add_uuid_string`` version is kept here.

    :param dataset: a Dataset model instance with ``name`` and ``pk`` fields.
    :return: str, the POSIX-style file name including the dataset UUID.
    """
    return add_uuid_string(dataset.name, dataset.pk).as_posix()


class TestTransfers(TestCase):
    """Tests for the data.transfers module."""

    def setUp(self):
        """Create some data repositories and file records to clean up.

        Builds two labs, four Globus repositories (three 'personal' local
        servers plus one non-personal main server whose name must contain
        'flatiron'), one session per lab, and three datasets per session with
        file records on the lab's local server(s) and on the main repository.
        The creation order matters: the tests below index ``self.records``
        positionally.
        """
        # Two of these are 'large' datasets that will be removed
        dtypes = ['ephysData.raw.ap', 'imaging.frames', 'foo.bar.baz']
        self.dtypes = [DatasetType.objects.create(name=name) for name in dtypes]
        # Create two labs
        self.labs = [Lab.objects.create(name=f'lab{i}') for i in range(2)]
        # Create four repos
        repo1 = DataRepository.objects.create(
            name='lab0_local0', lab=self.labs[0], globus_is_personal=True,
            globus_endpoint_id=uuid4(), globus_path='/mnt/foo/')
        repo2 = DataRepository.objects.create(
            name='lab0_local1', lab=self.labs[0], globus_is_personal=True,
            globus_endpoint_id=uuid4(), globus_path='/mnt/foo/')
        repo3 = DataRepository.objects.create(
            name='lab1_local', lab=self.labs[1], globus_is_personal=True,
            globus_endpoint_id=uuid4(), globus_path='/mnt/foo/')
        # NB: name must contain 'flatiron'!
        repo_main = DataRepository.objects.create(
            name='flatiron', globus_is_personal=False,
            globus_endpoint_id=uuid4(), globus_path='/mnt/foo/')
        # Create one session per lab
        self.subjects = [
            Subject.objects.create(
                nickname=f'subject{i}', lab=lab) for i, lab in enumerate(self.labs)]
        sessions = [Session.objects.create(
            subject=sub, number=1, lab=lab) for lab, sub in zip(self.labs, self.subjects)]
        # Create datasets and file records
        self.dset_names = ['ephysData.raw.ap.bin', 'imaging.frames.tar.bz2', 'foo.bar.baz']
        self.dsets = []
        for session in sessions:  # for one session in each lab, create one of each dataset
            self.dsets.extend(
                Dataset.objects.create(name=name, session=session,
                                       dataset_type=next(x for x in self.dtypes if x.name in name))
                for name in self.dset_names)

        # Create file record on each lab's local server and main repo
        session = 'subject/2020-01-01/001'
        self.records = []  # All file records
        for d in self.dsets:
            for i, repo in enumerate((repo1, repo2, repo3, repo_main)):
                if repo.globus_is_personal is False:
                    # Main repo file names carry the dataset UUID
                    rel_path = f'{session}/{TestManagementFiles._dataset_uuid_name(d)}'
                    # NOTE(review): only repo_main (i == 3) reaches this branch, so the
                    # two rewrites below can never fire — confirm the intended indices
                    if i == 0:
                        rel_path = 'Data2/' + rel_path
                    if i == 1:
                        rel_path = '/' + rel_path
                elif repo.lab != d.session.lab:
                    continue  # Don't create file record for dataset if session lab different
                else:
                    rel_path = f'{session}/{d.name}'
                self.records.append(
                    FileRecord.objects.create(
                        relative_path=rel_path, exists=True, dataset=d, data_repository=repo)
                )

    def test_get_absolute_path(self):
        """_get_absolute_path joins the repo's globus_path with the record's relative path."""
        expected = '/mnt/foo/subject/2020-01-01/001/ephysData.raw.ap.bin'
        self.assertEqual(expected, transfers._get_absolute_path(self.records[0]))
        expected = '/mnt/foo/subject/2020-01-01/001/ephysData.raw.ap.bin'
        self.assertEqual(expected, transfers._get_absolute_path(self.records[1]))

    def test_get_name_collection_revision(self):
        """_get_name_collection_revision parses an ALF path into its named parts.

        On success it returns ``(info_dict, None)``; on an unparsable path it
        returns ``(None, Response)`` with a 400 status.
        """
        # A plain session-relative path: collection and revision come back empty
        relative_path = PurePosixPath(self.records[0].relative_path)
        info, resp = transfers._get_name_collection_revision(
            relative_path.name, relative_path.parent.as_posix())
        self.assertIsNone(resp)
        expected = {
            'lab': '', 'subject': 'subject', 'date': '2020-01-01', 'number': '001',
            'collection': '', 'revision': '', 'filename': 'ephysData.raw.ap.bin',
            'full_path': 'subject/2020-01-01/001/ephysData.raw.ap.bin',
            'rel_dir_path': 'subject/2020-01-01/001'}
        self.assertDictEqual(info, expected)
        # With a collection folder ('alf') and a '#'-delimited revision folder
        relative_path = relative_path.parent / 'alf' / '#2020-10-01#' / relative_path.name
        expected.update(
            {'collection': 'alf', 'revision': '2020-10-01',
             'full_path': relative_path.as_posix()}
        )
        info, resp = transfers._get_name_collection_revision(
            relative_path.name, relative_path.parent.as_posix())
        self.assertIsNone(resp)
        self.assertDictEqual(info, expected)

        # An invalid sub-folder yields a 400 response instead of an info dict
        relative_path = relative_path.parent / 'invalid' / relative_path.name
        info, resp = transfers._get_name_collection_revision(
            relative_path.name, relative_path.parent.as_posix())
        self.assertIsNone(info)
        self.assertIsInstance(resp, Response)
        self.assertEqual(resp.status_code, 400)
        self.assertIn('Invalid ALF path', resp.data['detail'])
        # A malformed session path is rejected the same way
        info, resp = transfers._get_name_collection_revision(
            relative_path.name, 'subject/1-1-03/1/@lf')
        self.assertIsNone(info)
        self.assertIsInstance(resp, Response)
        self.assertEqual(resp.status_code, 400)
        self.assertIn('Invalid ALF path', resp.data['detail'])
8 changes: 4 additions & 4 deletions alyx/data/tests_rest.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,8 +142,8 @@ def test_dataset(self):
r = self.post(reverse('dataset-list'), data)
self.ar(r, 201)
# Check collection and revision have been set to default values
self.assertEqual(r.data['revision'], None)
self.assertEqual(r.data['collection'], None)
self.assertEqual(r.data['revision'], '')
self.assertEqual(r.data['collection'], '')
# Check that it has been set as the default dataset
self.assertEqual(r.data['default_dataset'], True)
# Check QC value is NOT_SET by default
Expand All @@ -170,7 +170,7 @@ def test_dataset(self):

r = self.post(reverse('dataset-list'), data)
self.ar(r, 201)
self.assertEqual(r.data['revision'], None)
self.assertEqual(r.data['revision'], '')
self.assertEqual(r.data['collection'], data['collection'])
self.assertEqual(r.data['default_dataset'], True)
self.assertEqual(r.data['qc'], 'PASS')
Expand All @@ -189,7 +189,7 @@ def test_dataset(self):
self.assertEqual(r['default_dataset'], False)

# Make sure if you specify the default dataset flag to false it is indeed false
data['collection'] = None
data['collection'] = ''
data['default_dataset'] = False
r = self.post(reverse('dataset-list'), data)
self.ar(r, 201)
Expand Down
1 change: 0 additions & 1 deletion alyx/jobs/admin.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
DropdownFilter, ChoiceDropdownFilter, RelatedDropdownFilter)

from jobs.models import Task
from misc.models import Lab
from alyx.base import BaseAdmin, get_admin_url


Expand Down

0 comments on commit a8aa9ea

Please sign in to comment.