fixed features processing #77

Merged
merged 1 commit on Sep 15, 2021
6 changes: 6 additions & 0 deletions .ipynb_checkpoints/Untitled-checkpoint.ipynb
@@ -0,0 +1,6 @@
{
"cells": [],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 5
}
4 changes: 2 additions & 2 deletions README.md
@@ -14,7 +14,7 @@ There isn't any better doc atm - post an issue if you have any question, or emai
- [Load spike times from unit u](https://github.com/m-beau/NeuroPyxels#load-spike-times-from-unit-u)
- [Load waveforms from unit u](https://github.com/m-beau/NeuroPyxels#load-waveforms-from-unit-u)
- [Compute auto/crosscorrelogram between 2 units](https://github.com/m-beau/NeuroPyxels#compute-autocrosscorrelogram-between-2-units)
- [Plot waveforms and crosscorrelograms of unit u](https://github.com/m-beau/NeuroPyxels#plot-correlograms-and-waveforms-from-unit-u)
- [Plot waveform and crosscorrelograms of unit u](https://github.com/m-beau/NeuroPyxels#plot-correlograms-and-waveforms-from-unit-u)
- [Plot chunk of raw data with overlaid units](https://github.com/m-beau/NeuroPyxels#plot-chunk-of-raw-data-with-overlaid-units)
- [Plot peri-stimulus time histograms across neurons and conditions](https://github.com/m-beau/NeuroPyxels/tree/m-beau#plot-peri-stimulus-time-histograms-across-neurons-and-conditions)
- [Merge datasets acquired on two probes simultaneously](https://github.com/m-beau/NeuroPyxels#merge-datasets-acquired-on-two-probes-simultaneously)
@@ -98,7 +98,7 @@ dp = 'path/to/dataset'
c = ccg(dp, [234,92], cbin=0.2, cwin=80)
```

### Plot waveform and croccorrelogram of unit u
### Plot waveform and crosscorrelogram of unit u
```python
# all plotting functions return matplotlib figures
from npyx.plot import plot_wvf, get_peak_chan
6 changes: 6 additions & 0 deletions Untitled.ipynb
@@ -0,0 +1,6 @@
{
"cells": [],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 5
}
2 changes: 1 addition & 1 deletion build/lib/npyx/__init__.py
@@ -38,4 +38,4 @@
npyx.stats
"""

__version__ = '2.0.1'
__version__ = '2.0.2'
2 changes: 1 addition & 1 deletion npyx.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: npyx
Version: 2.0.1
Version: 2.0.2
Summary: Python routines dealing with Neuropixels data.
Home-page: https://github.com/Npix-routines/NeuroPyxels
Author: Maxime Beau
24 changes: 14 additions & 10 deletions npyx/feat.py
@@ -15,7 +15,7 @@
"""
import tqdm
from tqdm import tqdm

import numpy as np
from pathlib import Path
@@ -38,7 +38,7 @@
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
from npyx.corr import (ccg, StarkAbeles2009_ccg_significance, ccg_sig_stack, gen_sfc, scaled_acg)
from npyx.gl import get_units
from npyx.gl import get_units, get_npyx_memory
#############################################
# Waveform features

@@ -791,7 +791,8 @@ def previous_peak(waves, chan_path, unit, n_chans = 20):
# detect_peaks
# find most negative peak
# check if there is a peak before the most negative one
max_chan_path = list(Path(chan_path/'routinesMemory').glob(f'dsm_{unit}_peakchan*'))[0]
dpnm = get_npyx_memory(chan_path)
max_chan_path = list(dpnm.glob(f'dsm_{unit}_peakchan*'))[0]
max_chan = int(np.load(max_chan_path))
# waves = waves.T
if max_chan <= n_chans - 1:
@@ -932,7 +933,8 @@ def chan_spread(all_wav, chan_path, unit, n_chans = 20, chan_spread_dist = 25.6)
_,_, p2p = consecutive_peaks_amp(all_wav.T)

# search for the file that has the given peak chan
max_chan_path = list(Path(chan_path/'routinesMemory').glob(f'dsm_{unit}_peakchan*'))[0]
dpnm = get_npyx_memory(chan_path)
max_chan_path = list(dpnm.glob(f'dsm_{unit}_peakchan*'))[0]
max_chan = int(np.load(max_chan_path))

chanmap = chan_map(chan_path)
@@ -1326,7 +1328,8 @@ def chan_spread_bp_plot(dp, unit, n_chans=20):
Input: datapath and unit (drift and shift matched datasets for now)
Returns: plot
"""
curr_fil = dp/'routinesMemory'/f'dsm_{unit}_all_waves_100-82_regular_False300-FalseNone-FalseNone.npy'
dpnm = get_npyx_memory(dp)
curr_fil = dpnm/f'dsm_{unit}_all_waves_100-82_regular_False300-FalseNone-FalseNone.npy'
if curr_fil.is_file():

if n_chans %2 !=0: n_chans +=1
@@ -1529,7 +1532,8 @@ def gen_ss_cs(recs_fn, show = False):

# dp = "/media/npyx/ssd2/ago/optotag/recordings/PkC/18-08-30_YC001_probe1"
# create the main folder for the images to be saved
ss_cs_folder = Path(ds['dp']+'/routinesMemory/ss_cs')
dpnm = get_npyx_memory(dp)
ss_cs_folder = dpnm / 'ss_cs'
ss_cs_folder.mkdir(exist_ok=True, parents=True)

#%% Find CCGs with long pause (at least 5ms)
@@ -1598,7 +1602,7 @@ def process_all(recs_fn, show = False, again = False):
all_feat = []
for i, ds in list(recs.items())[:]:
print(f"/nProcessing dataset {ds['dp']}...")
data_root = Path(ds['dp'])/'routinesMemory'
data_root = get_npyx_memory(ds['dp'])
features_folder = data_root / 'features'
acg_folder = data_root / 'acg'
wvf_folder = data_root / 'wvf'
@@ -1688,7 +1692,7 @@ def process_all(recs_fn, show = False, again = False):
print("Computing PCA features across datasets...")
for i, ds in list(recs.items())[:]:
# data_root = Path('/home/npyx/projects/optotag/proc_data')
data_root = Path(ds['dp'])/'routinesMemory'
data_root = get_npyx_memory(ds['dp'])
features_folder = data_root / 'features'
acg_folder = data_root / 'acg'
wvf_folder = data_root / 'wvf'
@@ -1799,7 +1803,7 @@ def process_all(recs_fn, show = False, again = False):

for i, ds in list(recs.items())[:]:
# data_root = Path('/home/npyx/projects/optotag/proc_data')
data_root = Path(ds['dp'])/'routinesMemory'
data_root = get_npyx_memory(ds['dp'])
features_folder = data_root / 'features'
acg_folder = data_root / 'acg'
wvf_folder = data_root / 'wvf'
@@ -1859,7 +1863,7 @@ def process_all(recs_fn, show = False, again = False):

for i, ds in list(recs.items())[:]:
# data_root = Path('/home/npyx/projects/optotag/proc_data')
data_root = Path(ds['dp'])/'routinesMemory'
data_root = get_npyx_memory(ds['dp'])
features_folder = data_root / 'features'
acg_folder = data_root / 'acg'
wvf_folder = data_root / 'wvf'
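
The recurring edit in npyx/feat.py replaces each hard-coded `dp/'routinesMemory'` lookup with the `get_npyx_memory` helper now imported from `npyx.gl`. Below is a minimal sketch of the before/after pattern, assuming `get_npyx_memory(dp)` returns the dataset's cache directory as a `pathlib.Path` (the folder previously addressed as `routinesMemory`); the dataset path and unit id are placeholders, so this is an illustration of the substitution rather than a runnable call against real data.

```python
from pathlib import Path

import numpy as np
from npyx.gl import get_npyx_memory  # helper added to the feat.py imports in this PR

dp = Path('path/to/dataset')  # placeholder dataset path
unit = 234                    # placeholder unit id

# Before: each function rebuilt the cache path by hand.
# max_chan_path = list((dp / 'routinesMemory').glob(f'dsm_{unit}_peakchan*'))[0]

# After: resolve the cache directory once via the helper, then glob as before.
dpnm = get_npyx_memory(dp)                                   # assumed to return a Path
max_chan_path = list(dpnm.glob(f'dsm_{unit}_peakchan*'))[0]  # IndexError if no cached file exists
max_chan = int(np.load(max_chan_path))                       # peak channel stored as a .npy scalar
```

The same one-line substitution is applied in `previous_peak`, `chan_spread`, `chan_spread_bp_plot`, `gen_ss_cs`, and `process_all` above, so the cache location is defined in a single place instead of being re-derived in every function.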