// This model/simulation file has been modified by Adam Shai, 2015.
// The original file comes from: Etay Hay, 2011
// "Models of Neocortical Layer 5b Pyramidal Cells Capturing a Wide Range of
// Dendritic and Perisomatic Active Properties"
// (Hay et al., PLoS Computational Biology, 2011)
// Here we modify the simulation to randomly distribute synapses along the
// dendrites of the cell, as described in Shai et al., PLoS Computational
// Biology, 2015. Each condition takes a few days to run. Precomputed outputs
// of these simulations are provided in the data folder, along with the MATLAB
// file used to plot them.
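// NOTE: the membrane mechanisms referenced by the biophysics file (e.g. Ca_LVAst,
// Ca_HVA) are NMODL mechanisms from the Hay et al. model and must be compiled
// (nrnivmodl) and loadable before this script is run.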
//====================== General files and tools =====================
load_file("nrngui.hoc")
//====================== cvode =======================================
objref cvode
cvode = new CVode()
cvode.active(1)
//=================== creating cell object ===========================
load_file("import3d.hoc")
objref L5PC
strdef morphology_file
morphology_file = "../morphologies/cell1.asc"
load_file("../models/L5PCbiophys5.hoc")
load_file("../models/L5PCtemplate.hoc")
L5PC = new L5PCtemplate(morphology_file)
//=================== load files for distributing synapses ===========================
xopen("distSynsUniform.hoc")
xopen("distSynsCluster2.hoc")
//=================== create a Section List for the apical tufts ===========================
forall{ nseg = L/0.25+2 } // very fine spatial grid (~1 segment per 0.25 um of length); a coarser nseg runs faster but can change the results
objref tuftList
tuftList = new SectionList()
access L5PC.apic[36]
tuftList.subtree()
tuftList.remove()
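// tuftList now holds every section in the subtree of apic[36] (the top of the
// apical trunk) except apic[36] itself, i.e. the apical tuft.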
//=================== Plot the neuron ===========================
objref shapeG
shapeG = new Shape()
//shapeG.color_list(L5PC.basal,3) //red
//shapeG.color_list(tuftList,5) //blue
//shapeG.color_list(basalList,5) // orange
//shapeG.color_list(axonList,0)
//shapeG.color_list(allSections,1)
//=================== create a Section List for the apical trunk ===========================
objref trunk, trunkIdx
trunk = new SectionList()
// apical trunk sections of this morphology (hand-picked indices),
// ending at apic[36], where the tuft subtree above begins
trunkIdx = new Vector()
trunkIdx.append(0, 1, 2, 3, 4, 14, 20, 26, 34, 36)
for i = 0, trunkIdx.size()-1 {
    trunkSec = trunkIdx.x[i]
    L5PC.apic[trunkSec] { trunk.append() }
}
//=================== Parameters for simulation ===========================
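// blockca = 1 scales the apical Ca channel conductances down to 25% and tags
// the output files with "qCa"; blockca = 0 leaves them unchanged.
// isDC = 1 adds a hyperpolarizing somatic DC step via the IClamp defined below.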
blockca = 1
isDC = 0
strdef s1, s2, s3, s4
if(blockca==0){s1=""}
if(blockca==1){s1="qCa"}
if(blockca){
    forsec L5PC.apical{ if(ismembrane("Ca_LVAst")){ gCa_LVAstbar_Ca_LVAst = gCa_LVAstbar_Ca_LVAst*0.25 } }
    forsec L5PC.apical{ if(ismembrane("Ca_HVA")){ gCa_HVAbar_Ca_HVA = gCa_HVAbar_Ca_HVA*0.25 } }
}
nSS = 175 // not used elsewhere in this file
nSD = 100 // not used elsewhere in this file
//=================== Initialize output files ===========================
objref ff1,ff2
ff1 = new File()
ff2 = new File()
//=================== Initialize somatic current clamp ===========================
objref theStim
access L5PC.soma
theStim = new IClamp(0.5)
theStim.amp = 0.0
if(isDC){
    theStim.amp = -0.2
    theStim.dur = 800
    theStim.del = 0
}
//=================== Initialize synapses ===========================
objref somaSynList, dendSynList, backSynList
somaSynList = new List()
dendSynList = new List()
backSynList = new List()
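// Count the total number of segments in the basal and tuft regions; three
// times that many synapses are distributed uniformly over each region below.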
temp = 0
forsec L5PC.basal{ temp = temp+nseg }
forsec tuftList{ temp = temp+nseg }
bigNum = temp*3
print bigNum
// distSynsCluster2 arguments:
//   $1 numClust     - number of clusters
//   $2 sectionList  - section list over which synapses are distributed
//   $3 gmax         - maximal conductance of the NMDA component
//   $4 ntar         - sets the AMPA component: AMPA conductance = gmax/ntar
//   $5 synsPerClust - number of synapses per cluster
//   $6 lClust       - spatial extent of each cluster
//somaSynList = distSynsCluster2(nClustBasal,L5PC.basal,1,1,nPerClust,15) // clustered basal input
//dendSynList = distSynsCluster2(nClustTuft,tuftList,1,1,nPerClust,15) // clustered tuft input
//backSynList = distSynsUniform(0,allSections,1,1) // background input
somaSynList = distSynsUniform(bigNum,L5PC.basal,1,1) // uniformly distributed basal input
dendSynList = distSynsUniform(bigNum,tuftList,1,1) // uniformly distributed tuft input
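// distSynsUniform presumably takes (number of synapses, target section list,
// NMDA gmax, ntar); see distSynsUniform.hoc for the actual argument list.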
//=================== Plot synapse locations ===========================
for i=0,somaSynList.count()-1{
    shapeG.point_mark(somaSynList.object(i),2,"O",3)
}
for i=0,dendSynList.count()-1{
    shapeG.point_mark(dendSynList.object(i),2,"O",3)
}
//=================== Initialize simulation ===========================
// To start every trial from the same resting state, the next block silences all
// synapses, runs 1 s with the adaptive integrator so the cell settles to rest,
// saves the state, then switches to fixed-step integration, advances 20 ms, and
// saves again. Each trial below restores this saved state (svstate.restore(1)).
// Set all synaptic conductances to zero
for i=0,somaSynList.count()-1{ somaSynList.object(i).gmax = 0.0 }
for i=0,dendSynList.count()-1{ dendSynList.object(i).gmax = 0.0 }
//for i=0,backSynList.count()-1{ backSynList.object(i).gmax = 0.0 }
// Find resting potential
access L5PC.soma
objref vrec
vrec = new Vector()
//vrec.record(&L5PC.soma.v(0.5))
cvode.active(1)
//forall{v=-75}
tstop = 1000
run()
vrest = v
print "REST AFTER LONG CVODE RUN IS ", vrest, " AT TIME ", t
objref svstate
svstate = new SaveState()
svstate.save()
cvode.active(0)
svstate.restore()
tstop = t+20
dt = 0.025
while(t<tstop){fadvance()}
vrest = v
print "REST AFTER NONCVODE RUN IS ", vrest, " AT TIME ", t
simStopTime = t // time of the saved resting state; synaptic activation times below are measured from here
svstate.save()
//=================== End initialize simulation ===========================
objref trec, g1
g1 = new Graph()
access L5PC.soma
distance(0)
sprint(s2,"%s%s%s","freq_",s1,".dat")
ff1.wopen(s2) //freq
sprint(s2,"%s%s%s","spikes_",s1,".dat")
ff2.wopen(s2) //spikes
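// With the data-writing lines inside the loop commented out, these files only
// receive blank lines; the per-trial results are printed to the terminal instead.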
objref vrec, vrec2, trec
nSomaClust = 30
nDendClust = 20
clustSize = 10
nPerClust = clustSize
nClustBasal = nSomaClust
nClustTuft = nDendClust
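// Note on naming: the "soma"/"basal" quantities refer to synapses on the basal
// dendrites (somaSynList) and the "dend"/"tuft" quantities to synapses on the
// apical tuft (dendSynList). The loops below sweep a 31 x 21 grid:
// 0-300 basal and 0-200 tuft synapses in steps of clustSize (10).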
objref nClustSyns
nClustSyns = new Vector(nPerClust)
objref rvec
objref rn
rn = new Random()
rn.discunif(0, bigNum-1)
rvec = new Vector()
objref rn2
rn2 = new Random()
rn2.uniform(55,105)
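// rn picks random synapse indices (0 to bigNum-1); rn2 draws activation delays
// 55-105 ms after the saved resting state.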
objref apc
objref apcv
objref isiv
// matrices to collect results (only filled if the mFreq/mSpikes lines inside the loop are uncommented)
objref mSpikes, mFreq
mSpikes = new Matrix(nClustBasal+1,nClustTuft+1)
mFreq = new Matrix(nClustBasal+1,nClustTuft+1)
print "Begin iterating through clusters"
objref nClustSyns
nClustSyns = new Vector(nPerClust)
//=================== Begin simulations ===========================
print "Begin iterating through clusters"
print "Syns Soma\tSyns Dend\tSpikes\tFreq"
i=0
// i and ii index the number of active basal and tuft synapses:
// clustSize*(i+1) basal and clustSize*(ii+1) tuft synapses are activated,
// so i = -1 (or ii = -1) corresponds to zero synapses in that region.
for i=-1,nClustBasal-1{
    for ii=-1,nClustTuft-1{
        //print "soma synapse: ", clustSize*(i+1)
        //print "dend synapse: ", clustSize*(ii+1)
        access L5PC.soma
        apc = new APCount(0.5) // count somatic spikes
        apcv = new Vector()
        apc.thresh = 0
        apc.record(apcv) // record spike times
        // reset all synaptic conductances to zero
        for iii=0,dendSynList.count()-1{ dendSynList.object(iii).gmax = 0.0 }
        for iii=0,somaSynList.count()-1{ somaSynList.object(iii).gmax = 0.0 }
        // turn on the appropriate number of randomly chosen basal synapses
        if(i!=-1){ //if basal synapses
            rvec = new Vector(clustSize*(i+1))
            rvec.setrand(rn) // random synapse indices
            for k=0,rvec.size()-1{
                somaSynList.object(rvec.x[k]).gmax=1.0
                somaSynList.object(rvec.x[k]).del=(simStopTime+1)
            }
        }
        // randomize activation times (55-105 ms after the saved rest time)
        for iii=0,rvec.size()-1{somaSynList.object(rvec.x[iii]).del=(simStopTime+rn2.repick())}
        // turn on the appropriate number of randomly chosen dendritic synapses
        if(ii!=-1){ //if dend synapses
            rvec = new Vector(clustSize*(ii+1))
            rvec.setrand(rn)
            for k=0,rvec.size()-1{
                dendSynList.object(rvec.x[k]).gmax=1.0
                dendSynList.object(rvec.x[k]).del=(simStopTime+1)
            }
        }
        for iii=0,rvec.size()-1{dendSynList.object(rvec.x[iii]).del=(simStopTime+rn2.repick())}
        /*
        vrec = new Vector()
        vrec2 = new Vector()
        trec = new Vector()
        */
        //vrec.record(&L5PC.soma.v(0.5))
        svstate.restore(1) // go back to rest
        // run the simulation for 200 ms with a fixed time step
        tstop = t+200
        dt = 0.05
        //dt = 0.1
        while(t<tstop){
            fadvance()
            //vrec.append(L5PC.soma.v)
            //vrec2.append(L5PC.apic[36].v(0.9))
            //trec.append(t)
        }
        /*
        vrec.printf(ff1,"%8.4f\t")
        ff1.printf("\n")
        vrec2.printf(ff2,"%8.4f\t")
        ff2.printf("\n")
        */
        // interspike intervals from the recorded spike times
        isiv = new Vector()
        //print "number of spikes is ", apcv.size()
        for iii=1,apcv.size()-1{
            isiv.append(apcv.x(iii)-apcv.x(iii-1))
        }
        if(apcv.size()==1){isiv.append(1000)} // single spike: sentinel ISI of 1000 ms
        if(apcv.size()==0){isiv.append(10000)} // no spikes: sentinel ISI of 10000 ms
        //print "freq is ", 1000/isiv.mean()
        //ff1.printf("%8.4f\t",1000/isiv.min()) // uncommenting this will overwrite data
        //ff2.printf("%8.4f\t",apcv.size()) // uncommenting this will overwrite data
        print clustSize*(i+1),"\t\t",clustSize*(ii+1),"\t\t",apcv.size(),"\t",1000/isiv.min(),"\t",1000/isiv.mean()
        //mFreq.x[i][ii]=1000/isiv.mean()
        //mSpikes.x[i][ii]=apcv.size()
    }//for ii
    ff1.printf("\n")
    ff2.printf("\n")
}//for i
//=================== End simulations ===========================
//mFreq.fprint(0,ff1)
//mSpikes.fprint(0,ff2)
ff1.close()
ff2.close()
quit()