modified ordering script
epnev committed May 6, 2016
1 parent ebcf31d commit 9d557f5
Showing 2 changed files with 17 additions and 15 deletions.
ca_source_extraction/utilities.py (19 changes: 10 additions & 9 deletions)
@@ -1247,15 +1247,16 @@ def evaluate_components(traces,N=5,robust_std=False):
probability at each time step of observing the N consecutive actual trace values given the distribution of noise
"""

# import pdb
# pdb.set_trace()
md=mode_robust(traces,axis=1)
ff1=traces-md[:,None]
ff1=-ff1*(ff1<0) # only consider values under the mode to determine the noise standard deviation
if robust_std:
# compute 25 percentile
ff1=np.sort(ff1[:].T,axis=0).T
ff1=np.sort(ff1,axis=1)
ff1[ff1==0]=np.nan
Ns=np.round(np.sum(ff1>0,1)*.75)
Ns=np.round(np.sum(ff1>0,1)*.5)
iqr_h=np.zeros(traces.shape[0])

for idx,el in enumerate(ff1):
@@ -1268,25 +1269,25 @@ def evaluate_components(traces,N=5,robust_std=False):
Ns=np.sum(ff1>0,1)
sd_r=np.sqrt(np.sum(ff1**2,1)/Ns)
#

from scipy.stats import norm

# compute z value
z=(traces-md[:,None])/sd_r[:,None]
z=(traces-md[:,None])/sd_r[:,None]
# probability of observing values larger than or equal to z given a normal distribution with mean md and std sd_r
erf=.5*scipy.special.erfc(z/np.sqrt(2))
erf = 1 - norm.cdf(z)
# use logarithm so that multiplication becomes sum
erf=-np.log10(erf)
erf=np.log(erf)
filt = np.ones(N)
# moving sum
erfc=np.apply_along_axis(lambda m: np.convolve(m, filt, mode='full'), axis=1, arr=erf)

#select the maximum value of such probability for each trace
fitness=np.max(erfc,1)
fitness=np.min(erfc,1)

ordered=np.argsort(fitness)


idx_components=ordered[::-1]# select only a portion of the components
idx_components=ordered #[::-1]# select only a portion of the components
fitness=fitness[idx_components]
erfc=erfc[idx_components]

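For readers skimming the hunks above, here is a minimal, self-contained sketch (not the library code itself) of the evaluation logic in the non-robust branch after this commit: estimate the noise standard deviation from values below a per-trace baseline, z-score the traces, sum the log tail probabilities over N consecutive samples, and take the minimum as the fitness. The median stands in for mode_robust, and norm.logsf is used as the numerically stable equivalent of the log(1 - norm.cdf(z)) expression in the diff.

import numpy as np
from scipy.stats import norm

def fitness_sketch(traces, N=5):
    """Score each row of traces by how unlikely its most significant run of N
    consecutive samples is under a Gaussian noise model; lower is better."""
    md = np.median(traces, axis=1)            # stand-in baseline for mode_robust
    ff1 = traces - md[:, None]
    ff1 = -ff1 * (ff1 < 0)                    # keep only values below the baseline
    Ns = np.sum(ff1 > 0, axis=1)
    sd_r = np.sqrt(np.sum(ff1**2, axis=1) / Ns)
    z = (traces - md[:, None]) / sd_r[:, None]
    log_p = norm.logsf(z)                     # stable form of log(1 - norm.cdf(z))
    filt = np.ones(N)
    # moving sum of N consecutive log tail probabilities
    erfc = np.apply_along_axis(lambda m: np.convolve(m, filt, mode='full'),
                               axis=1, arr=log_p)
    return np.min(erfc, axis=1)               # most negative window = fitness

# toy check: a trace with a transient should score lower than pure noise
rng = np.random.RandomState(0)
traces = rng.randn(2, 1000)
traces[0, 500:520] += 5.0
print(fitness_sketch(traces, N=5))

Note the knock-on effect visible in the diff: with the switch from the maximum of -log10 probabilities to the minimum of summed natural-log probabilities, ordered=np.argsort(fitness) already puts the best components first, which is why the [::-1] reversal is commented out.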
demo_patches.py (13 changes: 7 additions & 6 deletions)
@@ -51,7 +51,7 @@
corr_image=1
if corr_image:
# build correlation image
Cn=cse.utilities.local_correlations(Y[:,:,:10000])
Cn=cse.utilities.local_correlations(Y[:,:,:3000])
else:
# build mean image
Cn=np.mean(Y[:,:,:memory_fact*10000],axis=-1)
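As a rough illustration of what a correlation image is (this is not the cse.utilities.local_correlations implementation, just the idea): for every pixel, correlate its time course with those of its immediate neighbours and average, so pixels that belong to the same active component light up together. A minimal 4-neighbour version with made-up array shapes:

import numpy as np

def local_correlations_sketch(Y):
    """Average temporal correlation of each pixel with its 4 neighbours;
    Y is a movie of shape (d1, d2, T)."""
    Yz = Y - Y.mean(axis=-1, keepdims=True)
    Yz = Yz / (Y.std(axis=-1, keepdims=True) + 1e-12)   # z-score each pixel's time course
    rho = np.zeros(Y.shape[:2])
    cnt = np.zeros(Y.shape[:2])
    c = np.mean(Yz[:-1, :, :] * Yz[1:, :, :], axis=-1)  # vertical neighbours
    rho[:-1, :] += c
    rho[1:, :] += c
    cnt[:-1, :] += 1
    cnt[1:, :] += 1
    c = np.mean(Yz[:, :-1, :] * Yz[:, 1:, :], axis=-1)  # horizontal neighbours
    rho[:, :-1] += c
    rho[:, 1:] += c
    cnt[:, :-1] += 1
    cnt[:, 1:] += 1
    return rho / cnt

Cn_sketch = local_correlations_sketch(np.random.randn(32, 32, 500))

Restricting the movie to the first few thousand frames, as the change above does, mainly keeps the run time and memory footprint of building this image down.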
@@ -76,7 +76,8 @@
if save_results:
np.savez('results_analysis_patch.npz',A_tot=A_tot.todense(), C_tot=C_tot, sn_tot=sn_tot,d1=d1,d2=d2)
#%% if you have many components this might take long!
crd = cse.utilities.plot_contours(A_tot,Cn,thr=0.9,vmax=0.8, vmin=None)
pl.figure()
crd = cse.utilities.plot_contours(A_tot,Cn,thr=0.9)
#%% set parameters for full field of view analysis
options = cse.utilities.CNMFSetParms(Y,n_processes,p=0,gSig=gSig,K=A_tot.shape[-1],thr=merge_thresh)
pix_proc=np.minimum(np.int((d1*d2)/n_processes/(T/2000.)),np.int((d1*d2)/n_processes)) # regulates the amount of memory used
@@ -105,10 +106,10 @@
for log_file in log_files:
os.remove(log_file)
#%% order components according to a quality threshold and only select the ones with quality larger than quality_threshold.
quality_threshold=100
#quality_threshold=0
traces=C2+YrA
idx_components, fitness, erfc = cse.utilities.evaluate_components(traces,N=5,robust_std=True)
idx_components=idx_components[fitness>quality_threshold]
idx_components, fitness, erfc = cse.utilities.evaluate_components(traces,N=5,robust_std=False)
#idx_components=idx_components[fitness<quality_threshold]

print(idx_components.size*1./traces.shape[0])
#%% save analysis results in python and matlab format
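The commented-out line #idx_components=idx_components[fitness<quality_threshold] above hints at how the returned values could be used: with the new fitness (a minimum over summed log tail probabilities), lower values mean a trace is less likely to be pure noise, so components would be kept when fitness falls below a cut-off. A small stand-alone sketch with made-up values and a hypothetical threshold (it would need to be calibrated on real data):

import numpy as np
# stand-ins for the outputs of evaluate_components; values are illustrative only
fitness = np.array([-120.0, -85.0, -30.0, -12.0])   # sorted ascending, best first
idx_components = np.array([7, 2, 5, 0])             # component indices in that order
quality_threshold = -40                              # hypothetical cut-off
keep = fitness < quality_threshold                   # lower fitness = less noise-like
print(idx_components[keep])                          # -> [7 2]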
@@ -120,4 +121,4 @@
#%%
# select only a portion of the components
pl.figure();
crd = cse.utilities.plot_contours(A2.tocsc()[:,idx_components],Cn,thr=0.9)
crd = cse.utilities.plot_contours(A2.tocsc()[:,idx_components],Cn,thr=0.9)
