Commit

Merge branch 'dev' into surgeryImplantWeight

k1o0 committed May 10, 2024
2 parents 4b28460 + aadfa12 commit 97eb0fa
Showing 459 changed files with 323 additions and 125,002 deletions.
6 changes: 4 additions & 2 deletions .github/workflows/main.yml
@@ -53,6 +53,7 @@ jobs:
sudo touch /var/log/alyx_json.log; sudo chmod 666 /var/log/alyx_json.log
cd alyx
cp alyx/settings_ci.py alyx/settings.py
python manage.py collectstatic --noinput --link
coverage run manage.py test -n
coveralls --service=github
env:
@@ -73,7 +74,7 @@ jobs:
echo "requirements_frozen.txt unchanged"
rm requirements_frozen_temp.txt
echo "GIT_PUSH_NEEDED=false" >> "$GITHUB_ENV"
else
else
echo "requirements_frozen.txt is different, git push needed"
mv requirements_frozen_temp.txt requirements_frozen.txt
echo "GIT_PUSH_NEEDED=true" >> "$GITHUB_ENV"
@@ -85,7 +86,8 @@ jobs:
run: |
git config user.name github-actions
git config user.email [email protected]
git commit -m "GitHub Actions generated requirements_frozen.txt" -a
git add requirements_frozen.txt
git commit -m "GitHub Actions generated requirements_frozen.txt"
git push
# Docker steps only run when master branch pushed to directly OR when a PR is merged
2 changes: 1 addition & 1 deletion .gitignore
@@ -30,7 +30,7 @@ alyx/templates/ibl_reports
alyx/alyx/settings_secret.py
alyx/alyx/settings_lab.py
alyx/alyx/settings.py

alyx/static/*/*
scripts/deployment_examples/docker-apache/settings*

alyx/.idea/
5 changes: 4 additions & 1 deletion alyx/actions/views.py
@@ -459,7 +459,10 @@ def get(self, request, format=None, nickname=None):
records = subject.water_control.to_jsonable(start_date=start_date, end_date=end_date)
date_str = datetime.strptime(start_date, '%Y-%m-%d') if start_date else None
ref_iw = subject.water_control.reference_implant_weight_at(date_str)
data = {'subject': nickname, 'implant_weight': ref_iw, 'records': records}
data = {'subject': nickname, 'implant_weight': ref_iw,
'reference_weight_pct': subject.water_control.reference_weight_pct,
'zscore_weight_pct': subject.water_control.zscore_weight_pct,
'records': records}
return Response(data)


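For context on the response change above, here is a minimal client-side sketch of reading the two new percentage fields. It assumes the view is exposed as `water-requirement/<nickname>`, token authentication, and placeholder URL, token and nickname values:

```python
import requests

# Placeholder values -- substitute your own Alyx base URL, API token and subject nickname.
ALYX_URL = 'https://alyx.example.org'
HEADERS = {'Authorization': 'Token <your-api-token>'}

resp = requests.get(
    f'{ALYX_URL}/water-requirement/ZM_1085',
    params={'start_date': '2024-01-01', 'end_date': '2024-02-01'},
    headers=HEADERS,
)
resp.raise_for_status()
payload = resp.json()

# The commit adds the two *_pct fields alongside the existing keys.
print(payload['implant_weight'],
      payload['reference_weight_pct'],
      payload['zscore_weight_pct'])
```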
5 changes: 4 additions & 1 deletion alyx/actions/water_control.py
@@ -8,6 +8,7 @@
from operator import attrgetter, itemgetter
import os.path as op

from django.conf import settings
from django.urls import reverse
from django.utils.html import format_html
from django.http import HttpResponse
@@ -594,7 +595,9 @@ def water_control(subject):
subject_id=subject.id
)
wc.add_threshold(percentage=rw_pct + zw_pct, bgcolor=PALETTE['orange'], fgcolor='#FFC28E')
wc.add_threshold(percentage=.7, bgcolor=PALETTE['red'], fgcolor='#F08699', line_style='--')
if absolute_min := settings.WEIGHT_THRESHOLD:
wc.add_threshold(
percentage=absolute_min, bgcolor=PALETTE['red'], fgcolor='#F08699', line_style='--')
# Water restrictions.
wrs = sorted(list(subject.actions_waterrestrictions.all()), key=attrgetter('start_time'))
# Surgeries.
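A note on the guard above: moving the hard-coded 0.7 into `settings.WEIGHT_THRESHOLD` makes the red line configurable, and the walrus expression skips it entirely when the setting is falsy. A sketch of the corresponding lab setting (values illustrative):

```python
# alyx/alyx/settings_lab.py -- illustrative values
WEIGHT_THRESHOLD = 0.8    # dashed red line drawn at 80% of the reference weight
# WEIGHT_THRESHOLD = None # a falsy value means `if absolute_min := settings.WEIGHT_THRESHOLD:`
#                         # is False, so no absolute-minimum line is drawn
```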
2 changes: 1 addition & 1 deletion alyx/alyx/__init__.py
@@ -1 +1 @@
VERSION = __version__ = '2.0.0'
VERSION = __version__ = '2.1.1'
10 changes: 10 additions & 0 deletions alyx/alyx/base.py
@@ -378,6 +378,15 @@ def has_change_permission(self, request, obj=None):
return True
if request.user.is_superuser:
return True

# [CR 2024-03-12]
# HACK: following a request by Charu R from cortexlab, we authorize all users in the
# special Husbandry group to edit litters.
husbandry = 'husbandry' in ', '.join(_.name.lower() for _ in request.user.groups.all())
if husbandry:
if obj.__class__.__name__ in ('Litter', 'Subject', 'BreedingPair'):
return True

# Find subject associated to the object.
if hasattr(obj, 'responsible_user'):
subj = obj
@@ -640,6 +649,7 @@ class BaseRestPublicPermission(permissions.BasePermission):
"""
The purpose is to prevent public users from interfering in any way using writable methods
"""

def has_permission(self, request, view):
if request.method == 'GET':
return True
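The husbandry check above matches against the lowercase group names of the requesting user, so granting the permission is just a matter of group membership. A minimal sketch from `python manage.py shell`, with an illustrative username:

```python
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group

# Group and username are illustrative.
group, _ = Group.objects.get_or_create(name='Husbandry')
user = get_user_model().objects.get(username='some_user')
user.groups.add(group)
# has_change_permission() now returns True for this user on Litter, Subject
# and BreedingPair objects, because 'husbandry' appears in their group names.
```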
4 changes: 2 additions & 2 deletions alyx/alyx/settings_lab_template.py
@@ -3,14 +3,14 @@
# ALYX-SPECIFIC
ALLOWED_HOSTS = ['localhost', '127.0.0.1']
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'GB'
TIME_ZONE = 'Europe/London'
GLOBUS_CLIENT_ID = '525cc543-8ccb-4d11-8036-af332da5eafd'
SUBJECT_REQUEST_EMAIL_FROM = '[email protected]'
DEFAULT_SOURCE = 'IBL'
DEFAULT_PROTOCOL = '1'
SUPERUSERS = ('root',)
STOCK_MANAGERS = ('root',)
WEIGHT_THRESHOLD = 0.75
WEIGHT_THRESHOLD = 0.8 # Absolute minimum weight threshold (red line in plots)
DEFAULT_LAB_NAME = 'defaultlab'
WATER_RESTRICTIONS_EDITABLE = False # if set to True, all users can edit water restrictions
DEFAULT_LAB_PK = '4027da48-7be3-43ec-a222-f75dffe36872'
12 changes: 12 additions & 0 deletions alyx/data/fixtures/data.dataformat.json
@@ -322,5 +322,17 @@
"matlab_loader_function": "",
"python_loader_function": "sparse.load_npz"
}
},
{
"model": "data.dataformat",
"pk": "f6474942-3f0e-4a68-89a1-64d8d600d953",
"fields": {
"json": "",
"name": "npz",
"description": "A zipped archive of NumPy objects.",
"file_extension": ".npz",
"matlab_loader_function": "",
"python_loader_function": "numpy.load"
}
}
]
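The new `npz` format registers `numpy.load` as its Python loader; a quick illustrative round trip of what that loader does with an `.npz` archive:

```python
import numpy as np

# Write a zipped archive of NumPy arrays, then read it back with the registered loader.
np.savez('example.npz', spikes=np.arange(10), amps=np.random.rand(10))
with np.load('example.npz') as archive:   # NpzFile: a lazy, dict-like container
    print(sorted(archive.files))          # ['amps', 'spikes']
    spikes = archive['spikes']
```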
46 changes: 45 additions & 1 deletion alyx/data/fixtures/data.datasettype.json
@@ -1833,7 +1833,7 @@
"name": "camera.lightningPose",
"created_by": null,
"description": "Dataframe with coordinates of body parts as estimated by Lightning Pose algorithm",
"filename_pattern": ""
"filename_pattern": "*camera.lightningPose.*"
}
},
{
@@ -2231,5 +2231,49 @@
"description": "The start and end times of the laser stimulation period.",
"filename_pattern": ""
}
},
{
"model": "data.datasettype",
"pk": "c2eae195-624d-4ff5-a533-01d78caf5805",
"fields": {
"json": null,
"name": "waveforms.traces",
"created_by": null,
"description": "Snippets of lightly preprocessed waveforms from spikes, organized by unit.",
"filename_pattern": ""
}
},
{
"model": "data.datasettype",
"pk": "482dc8e6-0fda-4c9b-877d-f114ef75a9ff",
"fields": {
"json": null,
"name": "waveforms.templates",
"created_by": null,
"description": "Medians of unit waveforms.",
"filename_pattern": ""
}
},
{
"model": "data.datasettype",
"pk": "ff5867ab-cd28-4fde-9baa-142e2bc2f4fc",
"fields": {
"json": null,
"name": "waveforms.table",
"created_by": null,
"description": "Table containing the sample index, cluster id, and peak channel id for each waveform extracted.",
"filename_pattern": ""
}
},
{
"model": "data.datasettype",
"pk": "be70b6ac-7635-4554-9615-e40277a138ae",
"fields": {
"json": null,
"name": "waveforms.channels",
"created_by": null,
"description": "Array containing the channel ids for each waveform.",
"filename_pattern": ""
}
}
]
23 changes: 23 additions & 0 deletions alyx/data/migrations/0020_alter_datarepository_timezone.py
@@ -0,0 +1,23 @@
# Generated by Django 5.0 on 2024-03-26 15:27

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("data", "0019_dataset_qc"),
]

operations = [
migrations.AlterField(
model_name="datarepository",
name="timezone",
field=models.CharField(
blank=True,
default="Europe/London",
help_text="Timezone of the server (see https://en.wikipedia.org/wiki/List_of_tz_database_time_zones)",
max_length=64,
),
),
]
34 changes: 34 additions & 0 deletions alyx/data/tests_rest.py
@@ -761,6 +761,40 @@ def test_protected_view(self):
self.assertEqual(name, 'test_prot/a.b.e1')
self.assertEqual(prot_info, [])

def test_check_protected(self):
self.post(reverse('datarepository-list'), {'name': 'drb1', 'hostname': 'hostb1'})
self.post(reverse('lab-list'), {'name': 'labb', 'repositories': ['drb1']})

# Create protected tag
self.client.post(reverse('tag-list'), {'name': 'tag1', 'protected': True})

# Create some datasets and register
data = {'path': '%s/2018-01-01/002/' % self.subject,
'filenames': 'test_prot/a.c.e2',
'name': 'drb1', # this is the repository name
}

d = self.ar(self.client.post(reverse('register-file'), data), 201)

# Check the same dataset to see if it is protected, should be unprotected
# and get a status 200 response
_ = data.pop('name')

r = self.ar(self.client.get(reverse('check-protected'), data=data,
content_type='application/json'), 200)
self.assertEqual(r['status_code'], 200)

# add protected tag to the first dataset
dataset1 = Dataset.objects.get(pk=d[0]['id'])
tag1 = Tag.objects.get(name='tag1')
dataset1.tags.add(tag1)

# Check the same dataset to see if it is protected
r = self.ar(self.client.get(reverse('check-protected'), data=data,
content_type='application/json'), 200)
self.assertEqual(r['status_code'], 403)
self.assertEqual(r['error'], 'One or more datasets is protected')

def test_revisions(self):
# Check revision lookup with name
self.post(reverse('revision-list'), {'name': 'v2'})
6 changes: 6 additions & 0 deletions alyx/data/urls.py
@@ -14,6 +14,9 @@
'post': 'create'
})

check_protected = dv.ProtectedFileViewSet.as_view({
'get': 'list'
})

urlpatterns = [
path('data-formats', dv.DataFormatList.as_view(),
@@ -78,4 +81,7 @@
path('sync-file-status', sync_file_status,
name="sync-file-status"),

path('check-protected', check_protected,
name="check-protected"),

]
73 changes: 73 additions & 0 deletions alyx/data/views.py
@@ -327,6 +327,79 @@ def _parse_path(path):
return subject, date, session_number


class ProtectedFileViewSet(mixins.ListModelMixin,
viewsets.GenericViewSet):

serializer_class = serializers.Serializer

def list(self, request):
"""
Endpoint to check if a set of files is protected or not
The session is retrieved by the ALF convention in the relative path, so this field has to
match the format Subject/Date/Number as shown below.
The client side REST query should look like this:
```python
r_ = {'created_by': 'user_name_alyx',
'path': 'ZM_1085/2019-02-12/002/alf', # relative path to repo path
'filenames': ['file1', 'file2'],
}
```
Returns a response indicating if any of the datasets are protected or not
- Status 403 if a dataset is protected, details contains a list of protected datasets
- Status 200 if none of the datasets are protected
"""

req = request.GET.dict() if len(request.data) == 0 else request.data

user = req.get('created_by', None)
if user:
user = get_user_model().objects.get(username=user)
else:
user = request.user

rel_dir_path = req.get('path', '')
if not rel_dir_path:
raise ValueError("The path argument is required.")

# Extract the data repository from the hostname, the subject, the directory path.
rel_dir_path = rel_dir_path.replace('\\', '/')
rel_dir_path = rel_dir_path.replace('//', '/')
subject, date, session_number = _parse_path(rel_dir_path)

filenames = req.get('filenames', ())
if isinstance(filenames, str):
filenames = filenames.split(',')

session = _get_session(
subject=subject, date=date, number=session_number, user=user)
assert session

# Loop through the files to see if any are protected
prot_response = []
protected = []
for file in filenames:
info, resp = _get_name_collection_revision(file, rel_dir_path)
if resp:
return resp
prot, prot_info = _check_dataset_protected(
session, info['collection'], info['filename'])
protected.append(prot)
prot_response.append({file: prot_info})
if any(protected):
data = {'status_code': 403,
'error': 'One or more datasets is protected',
'details': prot_response}
return Response(data=data)
else:
data = {'status_code': 200,
'details': 'None of the datasets are protected'}
return Response(data=data)


class RegisterFileViewSet(mixins.CreateModelMixin,
viewsets.GenericViewSet):

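A minimal client-side sketch of calling the new `check-protected` route wired up in `data/urls.py` above. It assumes the data URLs are mounted at the site root, token authentication, and illustrative path and dataset names; note the view returns HTTP 200 either way and reports the outcome in the `status_code` field of the JSON body:

```python
import requests

# Placeholder values -- substitute your own base URL, token, session path and filenames.
ALYX_URL = 'https://alyx.example.org'
HEADERS = {'Authorization': 'Token <your-api-token>'}

resp = requests.get(
    f'{ALYX_URL}/check-protected',
    params={
        'created_by': 'user_name_alyx',
        'path': 'ZM_1085/2019-02-12/002/alf',                  # Subject/Date/Number[/collection]
        'filenames': 'spikes.times.npy,spikes.clusters.npy',   # comma-separated string
    },
    headers=HEADERS,
)
result = resp.json()
if result['status_code'] == 403:
    print('Protected datasets:', result['details'])
else:
    print('Safe to overwrite:', result['details'])
```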
@@ -0,0 +1,25 @@
# Generated by Django 4.2.11 on 2024-03-22 12:45

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('experiments', '0012_fov_imagingstack_imagingtype_alter_channel_lateral_and_more'),
]

operations = [
migrations.RemoveConstraint(
model_name='trajectoryestimate',
name='unique_trajectory_per_provenance',
),
migrations.AddConstraint(
model_name='trajectoryestimate',
constraint=models.UniqueConstraint(condition=models.Q(('probe_insertion__isnull', True)), fields=('provenance', 'chronic_insertion'), name='unique_trajectory_per_chronic_provenance'),
),
migrations.AddConstraint(
model_name='trajectoryestimate',
constraint=models.UniqueConstraint(condition=models.Q(('probe_insertion__isnull', False)), fields=('provenance', 'probe_insertion'), name='unique_trajectory_per_provenance'),
),
]