
Commit

modified decoder code and decision function; added LP decoding
shailajaAkella committed Sep 25, 2024
1 parent b84cee5 commit c6d3242
Showing 17 changed files with 300,233 additions and 792 deletions.
1,189 changes: 1,189 additions & 0 deletions notebooks/LP_decoder_accuracy_csv.ipynb

Large diffs are not rendered by default.

1,502 changes: 1,502 additions & 0 deletions notebooks/LP_decoder_confidence_csv.ipynb

Large diffs are not rendered by default.

64 changes: 64 additions & 0 deletions notebooks/decode_LP.py
@@ -0,0 +1,64 @@
import sys
sys.path.append(r"C:\Users\shailaja.akella\Dropbox (Personal)\DR\dynamic_routing_analysis_ethan\src")
import npc_lims
from dynamic_routing_analysis import decoding_utils
from npc_sessions import DynamicRoutingSession

# 1A get all uploaded & annotated ephys sessions
# ephys_sessions = tuple(s for s in npc_lims.get_session_info(is_ephys=True, is_uploaded=True, is_annotated=True))

# 2 set savepath and filename
savepath = r"C:\Users\shailaja.akella\Dropbox (Personal)\DR\dynamic_routing_analysis_ethan\results"
filename = 'decoding_results_linear_shift_20_units_re_run.pkl'

except_list = {}

# 3 set parameters
# linear shift decoding currently just takes the average firing rate over all bins defined here
spikes_binsize = 0.2 # bin size in seconds
spikes_time_before = 0.2 # time before the stimulus per trial
spikes_time_after = 0.01 # time after the stimulus per trial

# not used for linear shift decoding; these were used in a previous iteration of the decoding analysis
# decoder_binsize=0.2
# decoder_time_before=0.2
# decoder_time_after=0.1

params = {
    'n_units': 20,  # number of units to sample for each area
    'n_repeats': 25,  # number of times to repeat decoding with different randomly sampled units
    'input_data_type': 'LP',  # spikes or facemap or LP
    'vid_angle_facemotion': 'face',  # behavior, face, eye
    'vid_angle_LP': 'behavior',
    'central_section': '4_blocks_plus',
    # for linear shift decoding, how many trials to use for the shift. '4_blocks_plus' is best
    'exclude_cue_trials': False,  # option to totally exclude autorewarded trials
    'n_unit_threshold': 20,  # minimum number of units to include an area in the analysis
    'keep_n_SVDs': 500,  # number of SVD components to keep for facemap data
    'LP_parts_to_keep': ['ear_base_l', 'eye_bottom_l', 'jaw', 'nose_tip', 'whisker_pad_l_side'],
    'spikes_binsize': spikes_binsize,
    'spikes_time_before': spikes_time_before,
    'spikes_time_after': spikes_time_after,
    # 'decoder_binsize': decoder_binsize,
    # 'decoder_time_before': decoder_time_before,
    # 'decoder_time_after': decoder_time_after,
    'savepath': savepath,
    'filename': filename,
    'use_structure_probe': True,  # if True, append probe name to area name when multiple probes are in the same area
    'crossval': '5_fold',  # '5_fold' or 'blockwise' - blockwise untested with linear shift
    'labels_as_index': True,  # convert labels (context names) to index [0, 1]
    'decoder_type': 'linearSVC',  # 'linearSVC' or 'LDA' or 'RandomForest' or 'LogisticRegression'
}
# ephys_sessions[:]

for ephys_session in ['713655_2024-08-05']:
    try:
        session = DynamicRoutingSession(ephys_session)
        print(session.id + ' loaded')
        if 'structure' in session.electrodes[:].columns:
            decoding_utils.decode_context_with_linear_shift(session, params)
        else:
            print('no structure column found in electrodes table, moving to next recording')
        session = []
    except Exception as e:
        # key by the session string so this still works if the session object failed to load
        except_list[ephys_session] = repr(e)
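
As a side note on the parameters above: the script's step-3 comment says linear shift decoding currently just takes the average firing rate over all bins in the window defined by spikes_binsize, spikes_time_before, and spikes_time_after. A minimal sketch of that averaging step follows; it is an illustration only, not the decoding_utils implementation, and the helper name average_firing_rate and its argument layout are assumptions.

import numpy as np

def average_firing_rate(spike_times, stim_times, binsize=0.2, time_before=0.2, time_after=0.01):
    # Hypothetical helper (not part of decoding_utils): spike_times is a 1D array of spike
    # times (s) for one unit; stim_times is a 1D array of stimulus onset times (s).
    window = time_before + time_after
    n_bins = max(int(round(window / binsize)), 1)
    rates = np.empty(len(stim_times))
    for i, t in enumerate(stim_times):
        edges = np.linspace(t - time_before, t + time_after, n_bins + 1)
        counts, _ = np.histogram(spike_times, bins=edges)
        # mean count per bin divided by bin width = average firing rate (spikes/s) in the window
        rates[i] = counts.mean() / (window / n_bins)
    return rates

With the values used here (0.2 s bins over a 0.21 s window), the window collapses to a single bin, so each sampled unit contributes one averaged rate per trial to the decoder.
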
246 changes: 246 additions & 0 deletions notebooks/decode_context_from_LP_parts.ipynb
@@ -0,0 +1,246 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "6350a309-0b19-4c9e-bb9b-6e29602b3b5d",
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"sys.path.append(r\"C:\\Users\\shailaja.akella\\Dropbox (Personal)\\DR\\dynamic_routing_analysis_ethan\\src\")\n",
"\n",
"import npc_lims\n",
"from dynamic_routing_analysis import decoding_utils\n",
"from npc_sessions import DynamicRoutingSession\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "6a608c57-4bb6-401a-8c17-fb8f85dec95f",
"metadata": {},
"outputs": [],
"source": [
"#1A get all uploaded & annotated ephys sessions\n",
"ephys_sessions = tuple(s for s in npc_lims.get_session_info(is_ephys=True, is_uploaded=True, is_annotated=True))"
]
},
{
"cell_type": "code",
"execution_count": 21,
"id": "3aa8b197-c24a-451e-ac1b-1368b1db91cc",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"644867_2023-02-23_0 loaded\n",
"no cached trials table, using npc_sessions\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\shailaja.akella\\Anaconda3\\envs\\npc_sessions2\\lib\\site-packages\\numcodecs\\abc.py:107: UserWarning: Multi-threading is supported for wavpack version>=5.6.4, but current version is 5.5.0. Parallel decoding will not be available.\n",
" return cls(**config)\n",
"aligning sound waveforms: 100%|████████████| 537/537 [01:03<00:00, 8.49trial/s]\n",
"Long exposure times detected for s3://aind-ephys-data/ecephys_644867_2023-02-23_12-14-29/behavior_videos/Behavior_20230223T121631.mp4, suggesting multiple videos captured on sync: len(exposing_time_blocks)=2\n",
"Long exposure times detected for s3://aind-ephys-data/ecephys_644867_2023-02-23_12-14-29/behavior_videos/Face_20230223T121631.mp4, suggesting multiple videos captured on sync: len(exposing_time_blocks)=2\n",
"Long exposure times detected for s3://aind-ephys-data/ecephys_644867_2023-02-23_12-14-29/behavior_videos/Eye_20230223T121631.mp4, suggesting multiple videos captured on sync: len(exposing_time_blocks)=2\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"finished 644867_2023-02-23 0\n",
"finished 644867_2023-02-23\n"
]
}
],
"source": [
"# 2 set savepath and filename\n",
"savepath = r\"C:\\Users\\shailaja.akella\\Dropbox (Personal)\\DR\\dynamic_routing_analysis_ethan\\results\\all_features_LP_updated/\"\n",
"filename = 'decoding_results_linear_shift_LP.pkl'\n",
"\n",
"except_list = {}\n",
"\n",
"# 3 set parameters\n",
"# linear shift decoding currently just takes the average firing rate over all bins defined here\n",
"spikes_binsize = 0.2 # bin size in seconds\n",
"spikes_time_before = 0.2 # time before the stimulus per trial\n",
"spikes_time_after = 0.01 # time after the stimulus per trial\n",
"\n",
"# #not used for linear shift decoding, were used in a previous iteration of decoding analysis\n",
"# decoder_binsize=0.2\n",
"# decoder_time_before=0.2\n",
"# decoder_time_after=0.1\n",
"\n",
"params = {\n",
" 'n_units': 20, # number of units to sample for each area\n",
" 'n_repeats': 25, # number of times to repeat decoding with different randomly sampled units\n",
" 'input_data_type': 'LP', # spikes or facemap or LP\n",
" 'vid_angle_facemotion': 'face', # behavior, face, eye\n",
" 'vid_angle_LP': 'behavior',\n",
" 'central_section': '4_blocks_plus',\n",
" # for linear shift decoding, how many trials to use for the shift. '4_blocks_plus' is best\n",
" 'exclude_cue_trials': False, # option to totally exclude autorewarded trials\n",
" 'n_unit_threshold': 20, # minimum number of units to include an area in the analysis\n",
" 'keep_n_SVDs': 500, # number of SVD components to keep for facemap data\n",
" 'LP_parts_to_keep': ['ear_base_l', 'jaw', 'nose_tip', 'whisker_pad_l_side'],\n",
" 'spikes_binsize': spikes_binsize,\n",
" 'spikes_time_before': spikes_time_before,\n",
" 'spikes_time_after': spikes_time_after,\n",
" # 'decoder_binsize':decoder_binsize,\n",
" # 'decoder_time_before':decoder_time_before,\n",
" # 'decoder_time_after':decoder_time_after,\n",
" 'savepath': savepath,\n",
" 'filename': filename,\n",
" 'use_structure_probe': True, # if True, appedn probe name to area name when multiple probes in the same area\n",
" 'crossval': '5_fold', # '5_fold' or 'blockwise' - blockwise untested with linear shift\n",
" 'labels_as_index': True, # convert labels (context names) to index [0,1]\n",
" 'decoder_type': 'linearSVC', # 'linearSVC' or 'LDA' or 'RandomForest' or 'LogisticRegression'\n",
"}\n",
"\n",
"\n",
"for ephys_session in ephys_sessions[105:106]:\n",
" try:\n",
" session = DynamicRoutingSession(ephys_session)\n",
" print(session.id + ' loaded')\n",
" if 'structure' in session.electrodes[:].columns:\n",
" decoding_utils.decode_context_with_linear_shift(session, params)\n",
" else:\n",
" print('no structure column found in electrodes table, moving to next recording')\n",
" session = []\n",
" except Exception as e:\n",
" except_list[session.id] = repr(e)\n"
]
},
{
"cell_type": "code",
"execution_count": 20,
"id": "92cfdfc7-29dd-4083-92a9-ef7e794e8742",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(array([105], dtype=int64),)"
]
},
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"session_ids = [session.id for session in ephys_sessions[:]]\n",
"np.where(np.array(session_ids) == '644867_2023-02-23')"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "e34c3f1d-cade-4103-8d16-2e53c8957780",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{}"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"except_list"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "340acbf2-182c-405f-afb9-00daab7c9f4a",
"metadata": {},
"outputs": [],
"source": [
"644866_2023-02-10, \n",
"676909_2023-12-12, \n",
"676909_2023-12-13\t"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "ab14fea9-3e45-4927-993a-9de808874de0",
"metadata": {},
"outputs": [],
"source": [
"# missing nice\n",
"\n",
"# '706401'\n",
"# '703880'"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "81bd49bd-4081-4029-aca4-070d080ffe2f",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[(30, '706401_2024-04-22'),\n",
" (31, '703880_2024-04-18'),\n",
" (34, '703880_2024-04-17'),\n",
" (36, '703880_2024-04-15')]"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"[(i, s.id) for i, s in enumerate(ephys_sessions[:]) if '706401' in s.id or '703880' in s.id]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "95ca8b3b-1c2d-4684-b13d-f746e70db822",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.13"
}
},
"nbformat": 4,
"nbformat_minor": 5
}