Exploring the consistency of results from TVB simulations upon removal of nodes.
Entry Format: as-is
Notebook Setup¶
Define some variables
# put some system-specific variables in the namespace ('le' dict)
%run ~/set_localenv_vars.py
# name of this notebook; used to build all output paths below
nb_name = 'tvb_sim_consistency_analysis'
outdir = '%s/notebooks/%s' %(le['data_dir'], nb_name)
# this is where stuff actually lives
outdir_loc = '%s/%s' %('/mnt/titania-hecuba/mcintosh_lab/john/Data/notebooks', nb_name)
# symlink the two
import os
if not os.path.isdir(outdir_loc): os.makedirs(outdir_loc)
if not os.path.isdir(outdir): os.system('ln -s %s %s' %(outdir_loc,outdir))
# subfolder for figures
!mkdir -p $outdir/pics
# root path to tvb-library, tvb-data, etc. libraries.
tvb_folder = le['code_dir'] + '/libraries_of_others/github/tvb-pack'
# stuff for workdocs-cloudfiles
# NOTE(review): these look like obfuscated/placeholder credentials (reversed
# strings), not real secrets -- confirm before sharing the notebook
aws_key = 'drowssaperucesyreva'
aws_secret = '?teytidettopsuoyevah'
# HDF5 store collecting all the consistency summary dataframes written below
sims_outfile = outdir + '/sim_consistency_analysis_data.hdf'
Importage
# Generic imports
import os,sys,glob,numpy as np,pandas as pd
from scipy.io import loadmat
from joblib import Parallel,delayed
from sklearn.decomposition import PCA
from scipy.linalg import svd
from datetime import datetime
from itertools import product
from numpy import corrcoef
# NOTE(review): `product` is imported twice -- harmless duplicate
from itertools import product
# Visualization stuff
%matplotlib inline
from matplotlib import pyplot as plt
from IPython.display import Image,display as d
import seaborn as sns
sns.set_style('white')
# aws api and workdocs-cloudfiles stuff
sys.path.append(le['ipynb_workdocs_dir'])
from new_utils import nb_fig,cloudfiles_nb
# TVB stuff (tvb.simulator.lab redirects stdout/stderr on import, hence the
# save/restore dance around the star import)
nbso,nbse = sys.stdout,sys.stderr # hack part 1/2 to keep output printing properly
sys.path.append(tvb_folder +'/tvb-library')
sys.path.append(tvb_folder + '/tvb-data')
from tvb.simulator.lab import *
sys.stdout,sys.stderr = nbso,nbse # ...hack part 2/2
from copy import copy # seems that this needs to be done last otherwise it gets
# replaced with numpy copy function
Calico document extensions
%%javascript
IPython.load_extensions('calico-spell-check',
'calico-document-tools', 'calico-cell-tools');
Initialize aws api and workdocs-cloudfiles folder
# set up the workdocs-cloudfiles sync helper and a remote folder for this notebook
cnb = cloudfiles_nb('aws', [aws_key,aws_secret])
cnb.initialize_folder(nb_name)
Go to output folder
# work from the (symlinked) output folder so relative writes land there
os.chdir(outdir)
Ok, let's get cracking
Define some functions¶
Simulation workhorse function
def run_g2do_sim(conn,continue_sim=None, cs=1., D=0.001, cv=3.0, dt=0.5,
                 simlen=1e3,int_type='heun_stoch', burn_in=500,
                 g2do_params=None,initconds = None,return_B=True,return_V=True):
    """Run a TVB Generic2dOscillator simulation on connectivity `conn`.

    Parameters
    ----------
    conn : TVB Connectivity object to simulate on
    continue_sim : already-configured Simulator to continue from, or None
        to build and configure a fresh one
    cs : linear coupling strength (coupling.Linear(a=cs))
    D : additive noise dispersion for the stochastic integrators
    cv : conduction velocity -- currently unused in this function
        (TODO(review): confirm whether it should be applied to conn.speed)
    dt : integration step (ms)
    simlen : length of the recorded simulation (ms)
    int_type : 'heun_stoch', 'euler_stoch', or 'heun_det'
    burn_in : length (ms) of an initial run whose output is discarded;
        falsy values skip the burn-in
    g2do_params : dict of keyword args for models.Generic2dOscillator
        (None -> {})
    initconds : scalar used to fill the simulator history as a fixed
        initial condition; None keeps TVB's default (random) history
    return_B, return_V : whether to return the Bold / TemporalAverage
        monitor outputs as DataFrames (None returned otherwise)

    Returns
    -------
    (df_V, df_B, sim) : temporal-average DataFrame (or None), BOLD
        DataFrame (or None), and the Simulator (reusable via continue_sim).
    """
    # avoid the shared-mutable-default-argument pitfall
    if g2do_params is None:
        g2do_params = {}
    # choose the integrator; fail loudly on a bad name instead of hitting a
    # NameError on `solver` further down
    if int_type=='heun_stoch':
        solver = integrators.HeunStochastic(dt=dt,noise=noise.Additive(nsig=np.array([D])))
    elif int_type=='euler_stoch':
        solver = integrators.EulerStochastic(dt=dt,noise=noise.Additive(nsig=np.array([D])))
    elif int_type == 'heun_det':
        solver = integrators.HeunDeterministic(dt=dt)
    else:
        raise ValueError('unrecognized int_type: %s' % int_type)
    if continue_sim is None:  # identity check for the None sentinel, not ==
        sim = simulator.Simulator(
            model=models.Generic2dOscillator(**g2do_params),
            connectivity=conn,
            coupling=coupling.Linear(a=cs),
            integrator=solver,
            monitors=(monitors.TemporalAverage(period=5.0),# 200 Hz
                      monitors.Bold())
        )
        sim.configure()
    else:
        sim = continue_sim
    # `is not None` (not truthiness) so initconds=0. is honoured
    if initconds is not None:
        # short run to initialize internal buffers, then overwrite the whole
        # history with the fixed initial condition
        _ = sim.run(simulation_length=5.)
        sim.history = np.ones_like(sim.history)*initconds
    if burn_in:
        _ = sim.run(simulation_length=burn_in)
    res = sim.run(simulation_length=simlen)
    # one (times, values) pair per monitor, in the order declared above
    (V_ts,V_ys),(B_ts,B_ys) = res
    df_B = pd.DataFrame(np.squeeze(B_ys), index=B_ts) if return_B else None
    df_V = pd.DataFrame(np.squeeze(V_ys), index=V_ts) if return_V else None
    return df_V,df_B,sim
Parameter sweep function
def run_g2do_Gsweep(Gs,fixed_params):
    """Run run_g2do_sim in parallel over a sweep of coupling strengths.

    Parameters
    ----------
    Gs : iterable of coupling strength values (becomes the 'G' index level)
    fixed_params : dict of keyword args passed to run_g2do_sim for every G;
        'cs' is overridden per sweep value

    Returns
    -------
    (df_V_all, df_B_all) : temporal-average and BOLD results concatenated
        over G with a (G, t) MultiIndex; either is None when the matching
        return_V / return_B flag is off.
    """
    paramslist = []
    for g in Gs:
        newparams = copy(fixed_params)  # shallow copy, then per-G override
        newparams['cs'] = np.array([g])
        paramslist.append(newparams)
    sim_res = Parallel(n_jobs=2)\
                    (delayed(run_g2do_sim)(**p)\
                    for p in paramslist)
    # each r is (df_V, df_B, sim); default the flags to True to match
    # run_g2do_sim's own defaults when the keys are absent
    if fixed_params.get('return_B', True):
        Bs = {g: r[1] for g,r in zip(Gs,sim_res)}
        df_B_all = pd.concat(Bs)
        df_B_all.index.names = ['G', 't']
        df_B_all.columns.names = ['region']
    else:
        df_B_all = None
    if fixed_params.get('return_V', True):
        Vs = {g: r[0] for g,r in zip(Gs,sim_res)}
        df_V_all = pd.concat(Vs)
        df_V_all.index.names = ['G', 't']
        df_V_all.columns.names = ['region']
    else:
        df_V_all = None
    return df_V_all,df_B_all
Functions for computing correlations between empFC/simFC/empSC matrices
def corrmats(mat1,mat2):
    """Pearson correlations between two same-shaped square matrices.

    Returns
    -------
    (r_full, r_tril) : correlation over all entries, and over only the
        strictly-lower-triangle entries -- the latter is the meaningful one
        for symmetric FC/SC matrices, where the full version double-counts
        off-diagonals and includes the diagonal.
    """
    flat_a, flat_b = mat1.ravel(), mat2.ravel()
    r_full = corrcoef(flat_a, flat_b)[0, 1]
    lower = np.tril_indices_from(mat1, -1)
    r_tril = corrcoef(mat1[lower], mat2[lower])[0, 1]
    return r_full, r_tril
def get_conn_corrs(dfdict,SC,FC):
    """Correlate simulated FC with empirical FC and SC for each entry.

    Parameters
    ----------
    dfdict : mapping from key (e.g. coupling strength G) to a time x region
        DataFrame of simulated activity
    SC, FC : structural / functional connectivity matrices to compare against

    Returns
    -------
    DataFrame with rows 'corrFC', 'corrFCtril', 'corrSC', 'corrSCtril'
    (index level 'corrvar') and one column per dfdict key (level 'G').
    """
    corr_res = {}
    for key, ts_df in dfdict.items():
        simfc = ts_df.corr().values  # simulated FC from the time series
        fc_full, fc_tril = corrmats(simfc, FC)
        sc_full, sc_tril = corrmats(simfc, SC)
        corr_res[key] = dict(corrFC=fc_full,
                             corrFCtril=fc_tril,
                             corrSC=sc_full,
                             corrSCtril=sc_tril)
    df_corr_res = pd.DataFrame(corr_res)
    df_corr_res.index.names = ['corrvar']
    df_corr_res.columns.names = ['G']
    return df_corr_res
Reference simulations¶
# Reference simulations on the intact default connectome, one per
# integration / initial-condition regime; each returns (V df, BOLD df, sim)
origconn = connectivity.Connectivity(load_default=True)
origconn.configure()
# deterministic Heun, fixed initial conditions (history filled with 1.)
res = run_g2do_sim(origconn,initconds=1.,int_type='heun_det')#,burn_in=None)
tvbcaV_ic1hd_ref,tvbcaB_ic1hd_ref,tvbcasims_ic1hd_ref = res
tvbcaVc_ic1hd_ref = tvbcaV_ic1hd_ref.corr()
# deterministic Heun, random (TVB default) initial conditions
res = run_g2do_sim(origconn,initconds=None,int_type='heun_det')#,burn_in=None)
tvbcaV_icrhd_ref,tvbcaB_icrhd_ref,tvbcasims_icrhd_ref = res
tvbcaVc_icrhd_ref = tvbcaV_icrhd_ref.corr()
# stochastic Heun, random initial conditions
res = run_g2do_sim(origconn,initconds=None,int_type='heun_stoch')#,burn_in=None)
tvbcaV_icrhs_ref,tvbcaB_icrhs_ref,tvbcasims_icrhs_ref = res
tvbcaVc_icrhs_ref = tvbcaV_icrhs_ref.corr()
Consistency assessment 1: Single parameter set¶
Remove one connection¶
# indices of all regions in the default connectome; for the first analysis
# every single node is removed in turn
node_idxs = np.arange(origconn.number_of_regions)
rmnodes = node_idxs
n_nodes = node_idxs.shape[0]
Run sims
# Deterministic integration, fixed initial conditions
tvbcaV_ic1hd_1a,tvbcaB_ic1hd_1a,tvbcaVc_ic1hd_1a,\
tvbcaSims_ic1hd_1a,tvbcaCc_ic1hd_1a = {},{},{},{},{}
# Deterministic integration, random initial conditions
tvbcaV_icrhd_1a,tvbcaB_icrhd_1a,tvbcaVc_icrhd_1a,\
tvbcaSims_icrhd_1a,tvbcaCc_icrhd_1a = {},{},{},{},{}
# Stochastic integration, random initial conditions
tvbcaV_icrhs_1a,tvbcaB_icrhs_1a,tvbcaVc_icrhs_1a,\
tvbcaSims_icrhs_1a,tvbcaCc_icrhs_1a = {},{},{},{},{}
# "Remove" each node r by zeroing its row and column in the weights matrix,
# re-run the three reference regimes, and record the correlation of the
# resulting FC with the corresponding intact-network reference FC
for r_it,r in enumerate(rmnodes):
newconn = connectivity.Connectivity(load_default=True)
newconn.weights = origconn.weights.copy()
# zero all afferent and efferent connections of node r
newconn.weights[r,:] = 0
newconn.weights[:,r] = 0
newconn.configure()
# deterministic Heun, fixed initial conditions
res = run_g2do_sim(newconn,initconds=1.,int_type='heun_det')#,burn_in=None)
tvbcaV_ic1hd_1a[r],tvbcaB_ic1hd_1a[r],tvbcaSims_ic1hd_1a[r] = res
tvbcaVc_ic1hd_1a[r] = tvbcaV_ic1hd_1a[r].corr()
tvbcaCc_ic1hd_1a[r] = corrcoef(tvbcaVc_ic1hd_1a[r].values.ravel(),
tvbcaVc_ic1hd_ref.values.ravel())[0,1]
# deterministic Heun, random initial conditions
res = run_g2do_sim(newconn,initconds=None,int_type='heun_det')#,burn_in=None)
tvbcaV_icrhd_1a[r],tvbcaB_icrhd_1a[r],tvbcaSims_icrhd_1a[r] = res
tvbcaVc_icrhd_1a[r] = tvbcaV_icrhd_1a[r].corr()
tvbcaCc_icrhd_1a[r] = corrcoef(tvbcaVc_icrhd_1a[r].values.ravel(),
tvbcaVc_icrhd_ref.values.ravel())[0,1]
# stochastic Heun, random initial conditions
res = run_g2do_sim(newconn,initconds=None,int_type='heun_stoch')#,burn_in=None)
tvbcaV_icrhs_1a[r],tvbcaB_icrhs_1a[r],tvbcaSims_icrhs_1a[r] = res
tvbcaVc_icrhs_1a[r] = tvbcaV_icrhs_1a[r].corr()
tvbcaCc_icrhs_1a[r] = corrcoef(tvbcaVc_icrhs_1a[r].values.ravel(),
tvbcaVc_icrhs_ref.values.ravel())[0,1]
# summary table: rows = removed node (region label), columns = regime
df_Cc_1a = pd.DataFrame([tvbcaCc_ic1hd_1a.values(),
tvbcaCc_icrhd_1a.values(),
tvbcaCc_icrhs_1a.values()],
index=['1hd', 'rhd', 'rhs'],
columns=origconn.region_labels[node_idxs]).T
Remove multiple connections¶
Remove 2 nodes
# all combinations of 2 nodes
n_nodes = node_idxs.shape[0]
combs = sorted(product(node_idxs,node_idxs))
# keep ordered pairs of distinct nodes (each unordered pair appears twice)
combs = [c for c in combs if c[0]!= c[1]]
combs_arr = np.array(combs)
#tmp = combs_arr[np.random.randint(0,combs_arr.shape[0],100),:]
# sample 100 pairs at random; NOTE(review): randint samples WITH replacement,
# so the same pair can be drawn more than once (and duplicates collapse in
# the result dicts keyed by (r1,r2)) -- confirm this is intended
rmnodes = combs_arr[np.random.randint(0,combs_arr.shape[0],100),:]
# Deterministic integration, fixed initial conditions
tvbcaV_ic1hd_1b,tvbcaB_ic1hd_1b,tvbcaVc_ic1hd_1b,\
tvbcaSims_ic1hd_1b,tvbcaCc_ic1hd_1b = {},{},{},{},{}
# Deterministic integration, random initial conditions
tvbcaV_icrhd_1b,tvbcaB_icrhd_1b,tvbcaVc_icrhd_1b,\
tvbcaSims_icrhd_1b,tvbcaCc_icrhd_1b = {},{},{},{},{}
# Stochastic integration, random initial conditions
tvbcaV_icrhs_1b,tvbcaB_icrhs_1b,tvbcaVc_icrhs_1b,\
tvbcaSims_icrhs_1b,tvbcaCc_icrhs_1b = {},{},{},{},{}
# Same protocol as the single-node case, but removing both nodes of each
# sampled pair; results are keyed by the tuple (r1,r2)
for r_it,(r1,r2) in enumerate(rmnodes):
r1r2 = (r1,r2)
newconn = connectivity.Connectivity(load_default=True)
newconn.weights = origconn.weights.copy()
# zero all connections of both removed nodes
newconn.weights[r1,:] = 0
newconn.weights[:,r1] = 0
newconn.weights[r2,:] = 0
newconn.weights[:,r2] = 0
newconn.configure()
# deterministic Heun, fixed initial conditions
res = run_g2do_sim(newconn,initconds=1.,int_type='heun_det')#,burn_in=None)
tvbcaV_ic1hd_1b[r1r2],tvbcaB_ic1hd_1b[r1r2],tvbcaSims_ic1hd_1b[r1r2] = res
tvbcaVc_ic1hd_1b[r1r2] = tvbcaV_ic1hd_1b[r1r2].corr()
tvbcaCc_ic1hd_1b[r1r2] = corrcoef(tvbcaVc_ic1hd_1b[r1r2].values.ravel(),
tvbcaVc_ic1hd_ref.values.ravel())[0,1]
# deterministic Heun, random initial conditions
res = run_g2do_sim(newconn,initconds=None,int_type='heun_det')#,burn_in=None)
tvbcaV_icrhd_1b[r1r2],tvbcaB_icrhd_1b[r1r2],tvbcaSims_icrhd_1b[r1r2] = res
tvbcaVc_icrhd_1b[r1r2] = tvbcaV_icrhd_1b[r1r2].corr()
tvbcaCc_icrhd_1b[r1r2] = corrcoef(tvbcaVc_icrhd_1b[r1r2].values.ravel(),
tvbcaVc_icrhd_ref.values.ravel())[0,1]
# stochastic Heun, random initial conditions
res = run_g2do_sim(newconn,initconds=None,int_type='heun_stoch')#,burn_in=None)
tvbcaV_icrhs_1b[r1r2],tvbcaB_icrhs_1b[r1r2],tvbcaSims_icrhs_1b[r1r2] = res
tvbcaVc_icrhs_1b[r1r2] = tvbcaV_icrhs_1b[r1r2].corr()
tvbcaCc_icrhs_1b[r1r2] = corrcoef(tvbcaVc_icrhs_1b[r1r2].values.ravel(),
tvbcaVc_icrhs_ref.values.ravel())[0,1]
# summary table: rows = removed pair, columns = regime
df_Cc_1b = pd.DataFrame([tvbcaCc_ic1hd_1b.values(),
tvbcaCc_icrhd_1b.values(),
tvbcaCc_icrhs_1b.values()],
index=['1hd', 'rhd', 'rhs']).T
# columns=origconn.region_labels[node_idxs]).T
Remove 10 nodes
# Draw 100 random 10-node subsets.  np.random.choice with replace=False
# replaces the original hand-rolled rejection-sampling loop while still
# drawing each subset uniformly without replacement within a draw; as
# before, subsets MAY repeat across the 100 draws.
rmnodes = [sorted(np.random.choice(n_nodes, size=10, replace=False).tolist())
           for _ in range(100)]
# Deterministic integration, fixed initial conditions
tvbcaV_ic1hd_1c,tvbcaB_ic1hd_1c,tvbcaVc_ic1hd_1c,\
tvbcaSims_ic1hd_1c,tvbcaCc_ic1hd_1c = {},{},{},{},{}
# Deterministic integration, random initial conditions
tvbcaV_icrhd_1c,tvbcaB_icrhd_1c,tvbcaVc_icrhd_1c,\
tvbcaSims_icrhd_1c,tvbcaCc_icrhd_1c = {},{},{},{},{}
# Stochastic integration, random initial conditions
tvbcaV_icrhs_1c,tvbcaB_icrhs_1c,tvbcaVc_icrhs_1c,\
tvbcaSims_icrhs_1c,tvbcaCc_icrhs_1c = {},{},{},{},{}
# Same protocol, removing each sampled 10-node subset; results are keyed by
# the (sorted) tuple of removed node indices
for r_it,rs in enumerate(rmnodes):
rs = tuple(rs)
newconn = connectivity.Connectivity(load_default=True)
newconn.weights = origconn.weights.copy()
# zero all connections of every node in the subset
for r in rs:
newconn.weights[r,:] = 0
newconn.weights[:,r] = 0
newconn.configure()
# deterministic Heun, fixed initial conditions
res = run_g2do_sim(newconn,initconds=1.,int_type='heun_det')#,burn_in=None)
tvbcaV_ic1hd_1c[rs],tvbcaB_ic1hd_1c[rs],tvbcaSims_ic1hd_1c[rs] = res
tvbcaVc_ic1hd_1c[rs] = tvbcaV_ic1hd_1c[rs].corr()
tvbcaCc_ic1hd_1c[rs] = corrcoef(tvbcaVc_ic1hd_1c[rs].values.ravel(),
tvbcaVc_ic1hd_ref.values.ravel())[0,1]
# deterministic Heun, random initial conditions
res = run_g2do_sim(newconn,initconds=None,int_type='heun_det')#,burn_in=None)
tvbcaV_icrhd_1c[rs],tvbcaB_icrhd_1c[rs],tvbcaSims_icrhd_1c[rs] = res
tvbcaVc_icrhd_1c[rs] = tvbcaV_icrhd_1c[rs].corr()
tvbcaCc_icrhd_1c[rs] = corrcoef(tvbcaVc_icrhd_1c[rs].values.ravel(),
tvbcaVc_icrhd_ref.values.ravel())[0,1]
# stochastic Heun, random initial conditions
res = run_g2do_sim(newconn,initconds=None,int_type='heun_stoch')#,burn_in=None)
tvbcaV_icrhs_1c[rs],tvbcaB_icrhs_1c[rs],tvbcaSims_icrhs_1c[rs] = res
tvbcaVc_icrhs_1c[rs] = tvbcaV_icrhs_1c[rs].corr()
tvbcaCc_icrhs_1c[rs] = corrcoef(tvbcaVc_icrhs_1c[rs].values.ravel(),
tvbcaVc_icrhs_ref.values.ravel())[0,1]
# summary table: rows = removed subset, columns = regime
df_Cc_1c = pd.DataFrame([tvbcaCc_ic1hd_1c.values(),
tvbcaCc_icrhd_1c.values(),
tvbcaCc_icrhs_1c.values()],
index=['1hd', 'rhd', 'rhs']).T
# columns=origconn.region_labels[node_idxs]).T
Remove 50 nodes
# Draw 100 random 50-node subsets.  np.random.choice with replace=False
# replaces the original hand-rolled rejection-sampling loop while still
# drawing each subset uniformly without replacement within a draw; as
# before, subsets MAY repeat across the 100 draws.
rmnodes = [sorted(np.random.choice(n_nodes, size=50, replace=False).tolist())
           for _ in range(100)]
# Deterministic integration, fixed initial conditions
tvbcaV_ic1hd_1d,tvbcaB_ic1hd_1d,tvbcaVc_ic1hd_1d,\
tvbcaSims_ic1hd_1d,tvbcaCc_ic1hd_1d = {},{},{},{},{}
# Deterministic integration, random initial conditions
tvbcaV_icrhd_1d,tvbcaB_icrhd_1d,tvbcaVc_icrhd_1d,\
tvbcaSims_icrhd_1d,tvbcaCc_icrhd_1d = {},{},{},{},{}
# Stochastic integration, random initial conditions
tvbcaV_icrhs_1d,tvbcaB_icrhs_1d,tvbcaVc_icrhs_1d,\
tvbcaSims_icrhs_1d,tvbcaCc_icrhs_1d = {},{},{},{},{}
# Same protocol, removing each sampled 50-node subset; results are keyed by
# the (sorted) tuple of removed node indices
for r_it,rs in enumerate(rmnodes):
rs = tuple(rs)
newconn = connectivity.Connectivity(load_default=True)
newconn.weights = origconn.weights.copy()
# zero all connections of every node in the subset
for r in rs:
newconn.weights[r,:] = 0
newconn.weights[:,r] = 0
newconn.configure()
# deterministic Heun, fixed initial conditions
res = run_g2do_sim(newconn,initconds=1.,int_type='heun_det')#,burn_in=None)
tvbcaV_ic1hd_1d[rs],tvbcaB_ic1hd_1d[rs],tvbcaSims_ic1hd_1d[rs] = res
tvbcaVc_ic1hd_1d[rs] = tvbcaV_ic1hd_1d[rs].corr()
tvbcaCc_ic1hd_1d[rs] = corrcoef(tvbcaVc_ic1hd_1d[rs].values.ravel(),
tvbcaVc_ic1hd_ref.values.ravel())[0,1]
# deterministic Heun, random initial conditions
res = run_g2do_sim(newconn,initconds=None,int_type='heun_det')#,burn_in=None)
tvbcaV_icrhd_1d[rs],tvbcaB_icrhd_1d[rs],tvbcaSims_icrhd_1d[rs] = res
tvbcaVc_icrhd_1d[rs] = tvbcaV_icrhd_1d[rs].corr()
tvbcaCc_icrhd_1d[rs] = corrcoef(tvbcaVc_icrhd_1d[rs].values.ravel(),
tvbcaVc_icrhd_ref.values.ravel())[0,1]
# stochastic Heun, random initial conditions
res = run_g2do_sim(newconn,initconds=None,int_type='heun_stoch')#,burn_in=None)
tvbcaV_icrhs_1d[rs],tvbcaB_icrhs_1d[rs],tvbcaSims_icrhs_1d[rs] = res
tvbcaVc_icrhs_1d[rs] = tvbcaV_icrhs_1d[rs].corr()
tvbcaCc_icrhs_1d[rs] = corrcoef(tvbcaVc_icrhs_1d[rs].values.ravel(),
tvbcaVc_icrhs_ref.values.ravel())[0,1]
# summary table: rows = removed subset, columns = regime
df_Cc_1d = pd.DataFrame([tvbcaCc_ic1hd_1d.values(),
tvbcaCc_icrhd_1d.values(),
tvbcaCc_icrhs_1d.values()],
index=['1hd', 'rhd', 'rhs']).T
# columns=origconn.region_labels[node_idxs]).T
Save to file:
# persist the four consistency summary tables to the notebook's HDF store
df_Cc_1a.to_hdf(sims_outfile, 'df_Cc_1a')  # single-node removals
df_Cc_1b.to_hdf(sims_outfile, 'df_Cc_1b')  # 2-node removals
df_Cc_1c.to_hdf(sims_outfile, 'df_Cc_1c')  # 10-node removals
df_Cc_1d.to_hdf(sims_outfile, 'df_Cc_1d')  # 50-node removals
Compare:
(0_0)