Skip to content

Commit

Permalink
Merge pull request #874 from cortex-lab/WIP_miles
Browse files Browse the repository at this point in the history
WIP miles
  • Loading branch information
k1o0 authored Nov 15, 2024
2 parents 55e1a2e + f3f1ee3 commit 0c225a4
Show file tree
Hide file tree
Showing 31 changed files with 521 additions and 87 deletions.
8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

Database for experimental neuroscience laboratories

Documentation: http://alyx.readthedocs.io
Documentation: [Installation and getting started](http://alyx.readthedocs.io), [Alyx usage guide](https://docs.google.com/document/d/1cx3XLZiZRh3lUzhhR_p65BggEqTKpXHUDkUDagvf9Kc/edit?usp=sharing)


## Installation
Expand All @@ -16,7 +16,7 @@ this setup will work on other systems. Assumptions made are that you have sudo p
- installing the Python/Django environment
- serving a local database
- registering local data
- accessing local data using [ONE]()
- accessing local data using [ONE](https://one.internationalbrainlab.org)

## Contribution

Expand All @@ -29,6 +29,6 @@ this setup will work on other systems. Assumptions made are that you have sudo p
- `./manage.py test` test with migrations (recommended if model changes)
- NB: When running tests ensure `DEBUG = True` in the settings.py file (specifically `SECURE_SSL_REDIRECT = True` causes REST tests to fail)

```
$ /manage.py test -n
```shell
./manage.py test -n
```
2 changes: 1 addition & 1 deletion alyx/actions/migrations/0022_project_to_projects.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def project2projects(apps, schema_editor):
sessions = Session.objects.exclude(Q(project__isnull=True) | Q(projects=F('project')))

# Check query worked
# from one.util import ensure_list
# from iblutil.util import ensure_list
# for session in sessions.values('pk', 'project', 'projects'):
# assert session['project'] not in ensure_list(session['projects'])

Expand Down
7 changes: 7 additions & 0 deletions alyx/actions/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,13 @@ class SessionDatasetsSerializer(serializers.ModelSerializer):
default_revision = serializers.CharField(source='default_dataset')
qc = BaseSerializerEnumField(required=False)

def to_representation(self, instance):
    """Serialize *instance*, coercing a null revision to an empty string.

    The default representation yields ``None`` for datasets that have no
    revision; clients expect an empty string in that case.
    """
    data = super().to_representation(instance)
    if data.get('revision') is None:
        data['revision'] = ''
    return data

class Meta:
list_serializer_class = FilterDatasetSerializer
model = Dataset
Expand Down
2 changes: 1 addition & 1 deletion alyx/alyx/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
VERSION = __version__ = '3.0.3'
VERSION = __version__ = '3.1.3'
2 changes: 1 addition & 1 deletion alyx/alyx/settings_template.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
DEBUG = True

# Production settings:
if not DEBUG:
Expand Down
2 changes: 1 addition & 1 deletion alyx/data/management/commands/files.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ def handle(self, *args, **options):
"files on local server. Exiting now."))
return
if before is None:
self.stdout.write(self.style.ERROR("Date beforeshould be specified: use the "
self.stdout.write(self.style.ERROR("Date before should be specified: use the "
"--before=yyyy-mm-dd flag. Exiting now."))
return
dtypes = ['ephysData.raw.ap', 'ephysData.raw.lf', 'ephysData.raw.nidq',
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Generated by Django 5.1.2 on 2024-11-01 17:53

import django.core.validators
from django.db import migrations, models


class Migration(migrations.Migration):
    """Make the Dataset ``collection``, ``hash`` and ``version`` fields non-null.

    Each field is converted from a nullable CharField to a non-null one with an
    empty-string default; ``preserve_default=False`` means the default is used
    only to back-fill existing rows during this migration and is not kept on
    the model afterwards.
    """

    dependencies = [
        ('data', '0020_alter_datarepository_timezone'),
    ]

    operations = [
        migrations.AlterField(
            model_name='dataset',
            name='collection',
            # Regex mirrors the model's collection_validator (word chars, dots,
            # slashes and hyphens only).
            field=models.CharField(blank=True, default='', help_text='file subcollection or subfolder', max_length=255, validators=[django.core.validators.RegexValidator('^[\\w./-]+$', 'Collections must only contain letters, numbers, hyphens, underscores and forward slashes.')]),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='dataset',
            name='hash',
            # NOTE(review): "md5is" (missing space) reproduces the model's
            # concatenated help_text exactly — keep in sync with models.py.
            field=models.CharField(blank=True, default='', help_text='Hash of the data buffer, SHA-1 is 40 hex chars, while md5is 32 hex chars', max_length=64),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='dataset',
            name='version',
            field=models.CharField(blank=True, default='', help_text='version of the algorithm generating the file', max_length=64),
            preserve_default=False,
        ),
    ]
12 changes: 6 additions & 6 deletions alyx/data/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -251,7 +251,7 @@ class Revision(BaseModel):
"Revisions must only contain letters, "
"numbers, hyphens, underscores and forward slashes.")
name = models.CharField(max_length=255, blank=True, help_text="Long name",
unique=True, validators=[name_validator])
unique=True, null=False, validators=[name_validator])
description = models.CharField(max_length=1023, blank=True)
created_datetime = models.DateTimeField(blank=True, null=True, default=timezone.now,
help_text="created date")
Expand Down Expand Up @@ -316,19 +316,19 @@ class Dataset(BaseExperimentalData):
md5 = models.UUIDField(blank=True, null=True,
help_text="MD5 hash of the data buffer")

hash = models.CharField(blank=True, null=True, max_length=64,
hash = models.CharField(blank=True, null=False, max_length=64,
help_text=("Hash of the data buffer, SHA-1 is 40 hex chars, while md5"
"is 32 hex chars"))

# here we usually refer to version as an algorithm version such as ibllib-1.4.2
version = models.CharField(blank=True, null=True, max_length=64,
version = models.CharField(blank=True, null=False, max_length=64,
help_text="version of the algorithm generating the file")

# the collection comprises session sub-folders
collection_validator = RegexValidator(f"^{ALF_SPEC['collection']}$",
"Collections must only contain letters, "
"numbers, hyphens, underscores and forward slashes.")
collection = models.CharField(blank=True, null=True, max_length=255,
collection = models.CharField(blank=True, null=False, max_length=255,
help_text='file subcollection or subfolder',
validators=[collection_validator])

Expand Down Expand Up @@ -386,7 +386,7 @@ def __str__(self):
def save(self, *args, **kwargs):
# when a dataset is saved / created make sure the probe insertion is set in the reverse m2m
super(Dataset, self).save(*args, **kwargs)
if self.collection is None:
if not self.collection:
return
self.clean_fields() # Validate collection field
from experiments.models import ProbeInsertion, FOV
Expand Down Expand Up @@ -452,7 +452,7 @@ def data_url(self):
root = self.data_repository.data_url
if not root:
return None
from one.alf.files import add_uuid_string
from one.alf.path import add_uuid_string
return root + add_uuid_string(self.relative_path, self.dataset.pk).as_posix()

def save(self, *args, **kwargs):
Expand Down
11 changes: 9 additions & 2 deletions alyx/data/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ class DatasetSerializer(serializers.HyperlinkedModelSerializer):
hash = serializers.CharField(required=False, allow_null=True)
version = serializers.CharField(required=False, allow_null=True)
file_size = serializers.IntegerField(required=False, allow_null=True)
collection = serializers.CharField(required=False, allow_null=True)
collection = serializers.CharField(required=False, allow_blank=True, allow_null=True)
default_dataset = serializers.BooleanField(required=False, allow_null=True)
public = serializers.ReadOnlyField()
protected = serializers.ReadOnlyField()
Expand Down Expand Up @@ -178,7 +178,7 @@ def get_experiment_number(self, obj):
def create(self, validated_data):
# Get out some useful info
# revision = validated_data.get('revision', None)
collection = validated_data.get('collection', None)
collection = validated_data.get('collection', '')
name = validated_data.get('name', None)
default = validated_data.get('default_dataset', None)
session = validated_data.get('session', None)
Expand Down Expand Up @@ -213,6 +213,13 @@ def create(self, validated_data):

return super(DatasetSerializer, self).create(validated_data)

def to_representation(self, instance):
    """Serialize *instance*, coercing a null revision to an empty string.

    The default representation yields ``None`` for datasets without a
    revision; return an empty string instead so the API is consistent with
    the non-null collection/hash/version fields.
    """
    representation = super().to_representation(instance)
    if representation.get('revision') is None:
        representation['revision'] = ''
    return representation

class Meta:
model = Dataset
fields = ('url', 'name', 'created_by', 'created_datetime',
Expand Down
108 changes: 106 additions & 2 deletions alyx/data/tests.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from unittest import mock
from pathlib import Path
from pathlib import PurePosixPath
from uuid import uuid4
from datetime import datetime, timedelta

Expand All @@ -8,12 +8,15 @@
from django.db import transaction
from django.db.utils import IntegrityError
from django.db.models import ProtectedError
from rest_framework.response import Response
from one.alf.path import add_uuid_string

from data.management.commands import files
from data.models import Dataset, DatasetType, Tag, Revision, DataRepository, FileRecord
from subjects.models import Subject
from actions.models import Session
from misc.models import Lab
from data import transfers
from data.transfers import get_dataset_type


Expand Down Expand Up @@ -201,4 +204,105 @@ def _new_delete_client(self, _, gid, **kwargs):

@staticmethod
def _dataset_uuid_name(dataset):
return f'{Path(dataset.name).stem}.{dataset.pk}{Path(dataset.name).suffix}'
return add_uuid_string(dataset.name, dataset.pk).as_posix()


class TestTransfers(TestCase):
    """Tests for the data.transfers module."""

    def setUp(self):
        """Create some data repositories and file records to clean up.

        Builds two labs (each with one subject and one session), three
        personal/local Globus repositories plus one main repository, and one
        dataset of each type per session with file records on both the lab's
        local server(s) and the main repository.
        """
        # Two of these are 'large' datasets that will be removed
        dtypes = ['ephysData.raw.ap', 'imaging.frames', 'foo.bar.baz']
        self.dtypes = [DatasetType.objects.create(name=name) for name in dtypes]
        # Create two labs
        self.labs = [Lab.objects.create(name=f'lab{i}') for i in range(2)]
        # Create four repos
        repo1 = DataRepository.objects.create(
            name='lab0_local0', lab=self.labs[0], globus_is_personal=True,
            globus_endpoint_id=uuid4(), globus_path='/mnt/foo/')
        repo2 = DataRepository.objects.create(
            name='lab0_local1', lab=self.labs[0], globus_is_personal=True,
            globus_endpoint_id=uuid4(), globus_path='/mnt/foo/')
        repo3 = DataRepository.objects.create(
            name='lab1_local', lab=self.labs[1], globus_is_personal=True,
            globus_endpoint_id=uuid4(), globus_path='/mnt/foo/')
        # NB: name must contain 'flatiron'!
        repo_main = DataRepository.objects.create(
            name='flatiron', globus_is_personal=False,
            globus_endpoint_id=uuid4(), globus_path='/mnt/foo/')
        # Create one session per lab
        self.subjects = [
            Subject.objects.create(
                nickname=f'subject{i}', lab=lab) for i, lab in enumerate(self.labs)]
        sessions = [Session.objects.create(
            subject=sub, number=1, lab=lab) for lab, sub in zip(self.labs, self.subjects)]
        # Create datasets and file records
        self.dset_names = ['ephysData.raw.ap.bin', 'imaging.frames.tar.bz2', 'foo.bar.baz']
        self.dsets = []
        for session in sessions:  # for one session in each lab, create one of each dataset
            self.dsets.extend(
                Dataset.objects.create(name=name, session=session,
                                       dataset_type=next(x for x in self.dtypes if x.name in name))
                for name in self.dset_names)

        # Create file record on each lab's local server and main repo
        session = 'subject/2020-01-01/001'
        self.records = []  # All file records
        for d in self.dsets:
            for i, repo in enumerate((repo1, repo2, repo3, repo_main)):
                if repo.globus_is_personal is False:
                    # Main repository records carry the dataset UUID in the file name
                    rel_path = f'{session}/{TestManagementFiles._dataset_uuid_name(d)}'
                elif repo.lab != d.session.lab:
                    continue  # Don't create file record for dataset if session lab different
                else:
                    rel_path = f'{session}/{d.name}'
                # Exercise path normalization: an extra root folder on repo1
                # and a leading slash on repo2
                if i == 0:
                    rel_path = 'Data2/' + rel_path
                if i == 1:
                    rel_path = '/' + rel_path
                self.records.append(
                    FileRecord.objects.create(
                        relative_path=rel_path, exists=True, dataset=d, data_repository=repo)
                )

    def test_get_absolute_path(self):
        """_get_absolute_path should yield the same absolute path for records
        whose relative_path has an extra root folder or a leading slash."""
        expected = '/mnt/foo/subject/2020-01-01/001/ephysData.raw.ap.bin'
        self.assertEqual(expected, transfers._get_absolute_path(self.records[0]))
        expected = '/mnt/foo/subject/2020-01-01/001/ephysData.raw.ap.bin'
        self.assertEqual(expected, transfers._get_absolute_path(self.records[1]))

    def test_get_name_collection_revision(self):
        """Check parsing of ALF paths into their name/collection/revision parts,
        including the 400 response returned for invalid paths."""
        relative_path = PurePosixPath(self.records[0].relative_path)
        info, resp = transfers._get_name_collection_revision(
            relative_path.name, relative_path.parent.as_posix())
        self.assertIsNone(resp)
        expected = {
            'lab': '', 'subject': 'subject', 'date': '2020-01-01', 'number': '001',
            'collection': '', 'revision': '', 'filename': 'ephysData.raw.ap.bin',
            'full_path': 'Data2/subject/2020-01-01/001/ephysData.raw.ap.bin',
            'rel_dir_path': 'subject/2020-01-01/001'}
        self.assertDictEqual(info, expected)
        # A '#...#' folder is parsed as the revision, its parent as the collection
        relative_path = relative_path.parent / 'alf' / '#2020-10-01#' / relative_path.name
        expected.update(
            {'collection': 'alf', 'revision': '2020-10-01',
             'full_path': relative_path.as_posix()}
        )
        info, resp = transfers._get_name_collection_revision(
            relative_path.name, relative_path.parent.as_posix())
        self.assertIsNone(resp)
        self.assertDictEqual(info, expected)

        # Invalid paths should return no info and a 400 Response instead
        relative_path = relative_path.parent / 'invalid' / relative_path.name
        info, resp = transfers._get_name_collection_revision(
            relative_path.name, relative_path.parent.as_posix())
        self.assertIsNone(info)
        self.assertIsInstance(resp, Response)
        self.assertEqual(resp.status_code, 400)
        self.assertIn('Invalid ALF path', resp.data['detail'])
        info, resp = transfers._get_name_collection_revision(
            relative_path.name, 'subject/1-1-03/1/@lf')
        self.assertIsNone(info)
        self.assertIsInstance(resp, Response)
        self.assertEqual(resp.status_code, 400)
        self.assertIn('Invalid ALF path', resp.data['detail'])
8 changes: 4 additions & 4 deletions alyx/data/tests_rest.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,8 +142,8 @@ def test_dataset(self):
r = self.post(reverse('dataset-list'), data)
self.ar(r, 201)
# Check collection and revision have been set to default values
self.assertEqual(r.data['revision'], None)
self.assertEqual(r.data['collection'], None)
self.assertEqual(r.data['revision'], '')
self.assertEqual(r.data['collection'], '')
# Check that it has been set as the default dataset
self.assertEqual(r.data['default_dataset'], True)
# Check QC value is NOT_SET by default
Expand All @@ -170,7 +170,7 @@ def test_dataset(self):

r = self.post(reverse('dataset-list'), data)
self.ar(r, 201)
self.assertEqual(r.data['revision'], None)
self.assertEqual(r.data['revision'], '')
self.assertEqual(r.data['collection'], data['collection'])
self.assertEqual(r.data['default_dataset'], True)
self.assertEqual(r.data['qc'], 'PASS')
Expand All @@ -189,7 +189,7 @@ def test_dataset(self):
self.assertEqual(r['default_dataset'], False)

# Make sure if you specify the default dataset flag to false it is indeed false
data['collection'] = None
data['collection'] = ''
data['default_dataset'] = False
r = self.post(reverse('dataset-list'), data)
self.ar(r, 201)
Expand Down
Loading

0 comments on commit 0c225a4

Please sign in to comment.