{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import neuron\n",
    "usr_nm = os.path.expanduser('~')\n",
    "neuron.load_mechanisms(usr_nm + '/Documents/NEURON_mechanism/')\n",
    "from neuron import h, gui\n",
    "import numpy as np\n",
    "import matplotlib\n",
    "import matplotlib.pyplot as plt\n",
    "import matplotlib.cm as cm\n",
    "from matplotlib.colors import ListedColormap\n",
    "from scipy.signal import find_peaks\n",
    "from sklearn.svm import SVC\n",
    "import random \n",
    "import time\n",
    "from sklearn.linear_model import LinearRegression"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "font = {'family' : 'Arial',\n",
    "        'weight' : 'normal',\n",
    "        'size'   : 6}\n",
    "\n",
    "plt.rc('font', **font)\n",
    "plt.rcParams['font.family'] = 'Arial'\n",
    "matplotlib.rcParams['pdf.fonttype'] = 42\n",
    "matplotlib.rcParams['ps.fonttype'] = 42"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "run general_fun.ipynb"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "h.xopen(\"MG_reconstructed_by_Nate_Sawtell\");\n",
    "\n",
    "len_seg = 30\n",
    "for sec in h.all:\n",
    "    sec.nseg = int(np.ceil(sec.L/len_seg))\n",
    "    if not (sec.nseg % 2):\n",
    "        sec.nseg = sec.nseg + 1\n",
    "        \n",
    "vml_thresh = 100\n",
    "h.distance(sec=h.soma)\n",
    "\n",
    "vml = h.SectionList()\n",
    "dml = h.SectionList()\n",
    "exclude_comp = ['apic[0]']\n",
    "for sec in h.apical:\n",
    "    if sec.hname() not in exclude_comp:\n",
    "        dist_from_soma = h.distance(sec(.5))\n",
    "        if dist_from_soma < vml_thresh:\n",
    "            vml.append(sec=sec)\n",
    "        elif dist_from_soma >= vml_thresh:\n",
    "            dml.append(sec=sec)\n",
    "\n",
    "apic_soma = h.SectionList()  \n",
    "for sec in h.apical:\n",
    "    apic_soma.append(sec=sec)\n",
    "for sec in h.somatic:\n",
    "    apic_soma.append(sec=sec)\n",
    "        \n",
    "h.celsius = 20\n",
    "set_init = -65\n",
    "h.v_init = set_init\n",
    "\n",
    "spe_cond = 0.0003\n",
    "capac = 1\n",
    "# insert channels\n",
    "\n",
    "for sec in h.all:\n",
    "    sec.Ra = 100    # Axial resistance in Ohm * cm\n",
    "    sec.cm = capac      # Membrane capacitance in micro Farads / cm^2\n",
    "for sec in h.basal:\n",
    "        sec.insert('pas')\n",
    "        sec.g_pas = spe_cond\n",
    "        sec.e_pas = set_init"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "def make_bse(TTX = False, TTX_axon = False, TTX_apical = False, set_init = -65):\n",
    "\n",
    "    axon_no_hh = ['axon[0]']\n",
    "    for sec in h.axonal:\n",
    "        if any(word in sec.hname() for word in axon_no_hh):  \n",
    "#             sec.insert('pas')\n",
    "#             sec.g_pas = spe_cond\n",
    "#             sec.e_pas = set_init\n",
    "            sec.insert('hh')\n",
    "            sec.gl_hh = spe_cond\n",
    "            sec.el_hh = set_init\n",
    "            if TTX or TTX_axon:      \n",
    "                sec.gnabar_hh = 0\n",
    "                sec.gkbar_hh =  0\n",
    "            else:\n",
    "                sec.gnabar_hh = 0.168 # 2\n",
    "                sec.gkbar_hh =  0.05  # 0.2\n",
    "        else:\n",
    "            sec.insert('hh')\n",
    "            sec.gl_hh = spe_cond\n",
    "            sec.el_hh = set_init\n",
    "            if TTX or TTX_axon:      \n",
    "                sec.gnabar_hh = 0\n",
    "                sec.gkbar_hh =  0\n",
    "            else:\n",
    "                sec.gnabar_hh = 4 # 2\n",
    "                sec.gkbar_hh =  0.5  # 0.2\n",
    "#                 sec.gbar_Kv1_1 = 30\n",
    "    #         sec.insert('Kdr')\n",
    "    #         sec.insert('NaF')\n",
    "    #         for seg in sec:\n",
    "    #             if TTX or TTX_axon:\n",
    "    #                 seg.NaF.gnabar = 0\n",
    "    #             else:\n",
    "    #                 seg.NaF.gnabar = 0 # 0.1\n",
    "    #             seg.Kdr.gkbar = 0   # 0.5 Potassium conductance in S/cm2\n",
    "\n",
    "\n",
    "    apic_no_syn = ['apic[0]'] #,'apic[12]','apic[13]'\n",
    "#     apic_no_TTX = ['apic[1]']\n",
    "    for sec in h.apical:\n",
    "        if sec.hname() in apic_no_syn:\n",
    "            sec.insert('pas')\n",
    "            sec.g_pas = spe_cond\n",
    "            sec.e_pas = set_init\n",
    "        else:  \n",
    "            sec.insert('hh')\n",
    "            sec.gl_hh = spe_cond\n",
    "            sec.el_hh = set_init  \n",
    "#             sec.insert('cal') \n",
    "            if TTX or TTX_apical: # and sec.hname() not in apic_no_TTX      \n",
    "                sec.gnabar_hh = 0\n",
    "                sec.gkbar_hh =  0\n",
    "#                 sec.gcalbar_cal =  0 \n",
    "            else:\n",
    "                sec.gnabar_hh = 0.1 #   7C 0.033\n",
    "                sec.gkbar_hh =  0.008 #   7C 0.007\n",
    "#                 sec.gcalbar_cal =  0.075\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "make_bse(TTX = False, TTX_axon = False, TTX_apical = False, set_init = -65)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "def run_sim(trial_length = 100, Inh = False, pf_exc_canc_inh = False, Nspk_up_Bspk_down = False,\\\n",
    "            Inh_gmax = 0.04, somatic_inh = False, apical_inh = False, canc_gmax = .00022, stim_dur = 5000, record_conduc = False,\\\n",
    "           continuous_sensory = False, can_opposite = False):\n",
    "\n",
    "\n",
    "    stim = []\n",
    "    stim_all = []\n",
    "    skip_every = 20*0.025\n",
    "    dt = 0.025\n",
    "    ampl_noise = 1/(np.sqrt(dt/4))*np.random.normal(0, .0057, int(stim_dur/skip_every)+1)\n",
    "#     ampl_noise = np.random.lognormal(-3.35, .95, stim_dur)\n",
    "    ampl_noise[0:200] = 0\n",
    "    idx = 1;\n",
    "    for i in np.arange(0,stim_dur,skip_every):\n",
    "        stim = h.IClamp(h.soma(0.5))\n",
    "        stim.delay = i\n",
    "        stim.dur = skip_every\n",
    "        stim.amp = ampl_noise[idx]\n",
    "        idx = idx+1\n",
    "        stim_all.append(stim)\n",
    "\n",
    "        \n",
    "    if continuous_sensory:\n",
    "        if Inh:    \n",
    "            gaba = []\n",
    "            gaba_all = [] \n",
    "            exclude_comp = ['dend[0]']\n",
    "            for sec in h.dend:\n",
    "                if sec.hname() not in exclude_comp:\n",
    "                    for ii in np.arange(10,stim_dur,10):\n",
    "                        gaba = h.GabaASyn(sec(0.45)) \n",
    "                        gaba.onset = ii + np.random.normal(0, 5, 1) \n",
    "                        gaba.gmax = Inh_gmax + .001*np.random.normal(1, 1, 1) # 0.09 \n",
    "                        gaba_all.append(gaba)\n",
    "                \n",
    "        if pf_exc_canc_inh: \n",
    "            ampaAB = []\n",
    "            ampaAB_all = []\n",
    "            exclude_comp = ['apic[0]']\n",
    "            for sec in h.apical:\n",
    "                if sec.hname() not in exclude_comp:\n",
    "   \n",
    "                    for ii in np.arange(10,stim_dur,10):\n",
    "                        ampa_AB = h.AmpaSyn(sec(0.5))  \n",
    "                        ampa_AB.onset = ii + np.random.normal(0, 5, 1) # + np.random.normal(1, 3, 1) (0.2, 3, 1)\n",
    "                        ampa_AB.gmax = canc_gmax + .00002*np.random.normal(1, 2, 1) # .00021 + .00002*np.random.normal(1, 2, 1)\n",
    "                        ampaAB_all.append(ampa_AB)\n",
    "\n",
    "        if Nspk_up_Bspk_down:\n",
    "            gaba = []\n",
    "            gaba_all = []    \n",
    "            exclude_comp = ['dend[0]']\n",
    "            for sec in h.dend:\n",
    "                if sec.hname() not in exclude_comp:\n",
    "                    for ii in np.arange(10,stim_dur,10):\n",
    "                        gaba = h.GabaASyn(sec(0.45)) \n",
    "                        gaba.onset = ii + np.random.normal(0, 5, 1) \n",
    "                        gaba.gmax = 0.18 + .001*np.random.normal(1, 1, 1) # 0.09 \n",
    "                        gaba_all.append(gaba)\n",
    "                    \n",
    "            ampaABE = []\n",
    "            ampaABE_all = []\n",
    "            for sec in h.dend:\n",
    "                for ii in np.arange(10,stim_dur,10):\n",
    "                    ampa_ABE = h.AmpaSyn(sec(0.5)) \n",
    "                    ampa_ABE.onset = ii + np.random.normal(0, 5, 1)\n",
    "                    ampa_ABE.gmax = 0.001 + .00001*np.random.normal(1, 1, 1) #+ .01*np.random.normal(0, 1, 1)   #0.013\n",
    "                    ampaABE_all.append(ampa_ABE)\n",
    "                    \n",
    "            if can_opposite:\n",
    "                ampaAB = []\n",
    "                ampaAB_all = []\n",
    "                exclude_comp = ['apic[0]']\n",
    "                for sec in h.apical:\n",
    "                    if sec.hname() not in exclude_comp:\n",
    "                        for ii in np.arange(10,stim_dur,10):\n",
    "                            ampa_AB = h.AmpaSyn(sec(0.5))  \n",
    "                            ampa_AB.onset = ii + np.random.normal(0, 5, 1) # + np.random.normal(1, 3, 1) (0.2, 3, 1)\n",
    "                            ampa_AB.gmax = canc_gmax \n",
    "                            ampaAB_all.append(ampa_AB)\n",
    "\n",
    "                    \n",
    "        if somatic_inh:    \n",
    "            gaba = []\n",
    "            gaba_all = []\n",
    "            for ii in np.arange(10,stim_dur,10):\n",
    "                gaba = h.GabaASyn(h.soma(0.4))\n",
    "                gaba.onset = ii + np.random.normal(0, 5, 1) \n",
    "                gaba.gmax = Inh_gmax + .001*np.random.normal(1, 1, 1) # 0.09 \n",
    "                gaba_all.append(gaba)\n",
    "        \n",
    "        if apical_inh:   \n",
    "            gaba = []\n",
    "            gaba_all = [] \n",
    "            exclude_comp = ['apic[0]']\n",
    "            for sec in vml:\n",
    "                if sec.hname() not in exclude_comp:\n",
    "                    for ii in np.arange(10,stim_dur,10):\n",
    "                        gaba = h.GabaASyn(sec(0.45)) \n",
    "                        gaba.onset = ii + np.random.normal(0, 5, 1) \n",
    "                        gaba.gmax = Inh_gmax + .001*np.random.normal(1, 1, 1) # 0.09 \n",
    "                        gaba_all.append(gaba)\n",
    "#######             \n",
    "    else:\n",
    "        if Inh:    \n",
    "            gaba = []\n",
    "            gaba_all = [] \n",
    "            for sec in vml:\n",
    "                for ii in np.arange((trial_length/2-3),stim_dur,trial_length):\n",
    "                    gaba = h.GabaASyn(sec(0.45)) \n",
    "                    gaba.onset = ii + np.random.normal(0, 5, 1) \n",
    "                    gaba.gmax = Inh_gmax + .001*np.random.normal(1, 1, 1) # 0.09 \n",
    "                    gaba_all.append(gaba)\n",
    "\n",
    "        if pf_exc_canc_inh: \n",
    "            ampaAB = []\n",
    "            ampaAB_all = []\n",
    "            exclude_comp = ['apic[0]']\n",
    "            for sec in h.apical:\n",
    "                if sec.hname() not in exclude_comp:   \n",
    "                    for ii in np.arange((trial_length/2-3),stim_dur,trial_length):\n",
    "                        ampa_AB = h.AmpaSyn(sec(0.5))  \n",
    "                        ampa_AB.onset = ii + np.random.normal(0, 5, 1) # + np.random.normal(1, 3, 1) (0.2, 3, 1)\n",
    "                        ampa_AB.gmax = canc_gmax + .00002*np.random.normal(1, 2, 1) # .00021 + .00002*np.random.normal(1, 2, 1)\n",
    "                        ampaAB_all.append(ampa_ABE)\n",
    "######\n",
    "        if Nspk_up_Bspk_down:\n",
    "            gaba = []\n",
    "            gaba_all = []    \n",
    "            for sec in vml:\n",
    "                for ii in np.arange((trial_length/2-3),stim_dur,trial_length):\n",
    "                    gaba = h.GabaASyn(sec(0.45)) \n",
    "                    gaba.onset = ii + np.random.normal(0, 2, 1) \n",
    "                    gaba.gmax = 0.2 + .001*np.random.normal(1, 1, 1) # strong 0.05 -- small 0.025 -- Miniscule 0.01\n",
    "                    gaba_all.append(gaba)\n",
    "\n",
    "            ampaABE = []\n",
    "            ampaABE_all = []  \n",
    "            for ii in np.arange((trial_length/2),stim_dur,trial_length):\n",
    "                ampa_ABE = h.AmpaSyn(h.dend[1](0.5)) \n",
    "                ampa_ABE.onset = ii + np.random.normal(0, 4, 1) \n",
    "                ampa_ABE.gmax = 0.02 + .001*np.random.normal(1, 1, 1) #+ .01*np.random.normal(0, 1, 1)   #0.013\n",
    "                ampaABE_all.append(ampa_AB)\n",
    "\n",
    "\n",
    "    dict_all = {}\n",
    "    v_vec_apic = h.Vector()        \n",
    "    v_vec_apic_up = h.Vector()\n",
    "    v_vec_axon = h.Vector()        \n",
    "    v_vec_soma = h.Vector()        \n",
    "    t_vec = h.Vector()             \n",
    "\n",
    "    v_vec_apic.record(h.apic[11](0.5)._ref_v)\n",
    "    v_vec_apic_up.record(h.apic[11](1)._ref_v)\n",
    "    v_vec_axon.record(h.axon[3](0.5)._ref_v)\n",
    "    v_vec_soma.record(h.soma(0.5)._ref_v)\n",
    "    t_vec.record(h._ref_t)\n",
    "    \n",
    "    if record_conduc:\n",
    "        ina_vec_apic = h.Vector()        \n",
    "        ik_vec_apic = h.Vector() \n",
    "        m_vec_apic = h.Vector()        \n",
    "        h_vec_apic = h.Vector() \n",
    "        n_vec_apic = h.Vector() \n",
    "\n",
    "        ina_vec_apic.record(h.apic[11](1)._ref_ina)\n",
    "        ik_vec_apic.record(h.apic[11](1)._ref_ik)\n",
    "        m_vec_apic.record(h.apic[11](1)._ref_m_hh)\n",
    "        h_vec_apic.record(h.apic[11](1)._ref_h_hh)\n",
    "        n_vec_apic.record(h.apic[11](1)._ref_n_hh)\n",
    "\n",
    "    h.tstop = stim_dur\n",
    "    h.run()\n",
    "    dict_all['t_vec'] =  t_vec\n",
    "    dict_all['v_vec_apic'] = v_vec_apic\n",
    "    dict_all['v_vec_apic_up'] = v_vec_apic_up\n",
    "    dict_all['v_vec_axon'] = v_vec_axon\n",
    "    dict_all['v_vec_soma'] = v_vec_soma\n",
    "    \n",
    "    if record_conduc:\n",
    "        dict_all['ina_vec_apic'] = ina_vec_apic        \n",
    "        dict_all['ik_vec_apic'] = ik_vec_apic \n",
    "        dict_all['m_vec_apic'] = m_vec_apic        \n",
    "        dict_all['h_vec_apic'] = h_vec_apic \n",
    "        dict_all['n_vec_apic'] = n_vec_apic \n",
    "\n",
    "    stim = []\n",
    "    stim_all = []\n",
    "\n",
    "    ampaA = []\n",
    "    ampaA_all = []\n",
    "\n",
    "    ampaAAB = []\n",
    "    ampaAAB_all = []\n",
    "\n",
    "    gaba = []\n",
    "    gaba_all = []\n",
    "\n",
    "    ampaAB = []\n",
    "    ampaAB_all = []\n",
    "\n",
    "    ampaABE = []\n",
    "    ampaABE_all = []\n",
    "\n",
    "    return dict_all"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "379.590696811676\n"
     ]
    }
   ],
   "source": [
    "t = time.time()\n",
    "np.random.seed(seed=1)\n",
    "nr_runs = 100\n",
    "stim_dur = 5000\n",
    "trial_length = 100\n",
    "eliminate_trials = 3\n",
    "record_conduc = True\n",
    "continuous_sensory = True\n",
    "\n",
    "Inh = False\n",
    "Inh_gmax = 0 #0.03 \n",
    "pf_exc_canc_inh = False\n",
    "canc_gmax = 0 #.000083\n",
    "Nspk_up_Bspk_down = False\n",
    "after_minus_before = False\n",
    "\n",
    "if Nspk_up_Bspk_down:\n",
    "    Inh = False\n",
    "    pf_exc_canc_inh = False\n",
    "    \n",
    "if pf_exc_canc_inh:\n",
    "    Inh = True\n",
    "    Nspk_up_Bspk_down = False\n",
    "\n",
    "if after_minus_before:\n",
    "    Inh = False\n",
    "    pf_exc_canc_inh = True\n",
    "    Nspk_up_Bspk_down = False\n",
    "    \n",
    "all_runs = []\n",
    "all_runs_apic = []\n",
    "all_runs_apic_up = []\n",
    "all_runs_axon = []\n",
    "\n",
    "all_runs_ina = []\n",
    "all_runs_ik = []\n",
    "all_runs_m = []\n",
    "all_runs_h = []\n",
    "all_runs_n = []\n",
    "\n",
    "for ii in range(nr_runs):\n",
    "    results_all = run_sim(trial_length = trial_length, Inh = Inh, Inh_gmax = Inh_gmax, pf_exc_canc_inh = pf_exc_canc_inh,\\\n",
    "                          canc_gmax = canc_gmax, Nspk_up_Bspk_down = Nspk_up_Bspk_down,\\\n",
    "                          stim_dur = stim_dur, continuous_sensory = continuous_sensory, record_conduc = record_conduc)\n",
    "    all_trials,trial_length_dt = trace_to_trials(results_all['v_vec_soma'].to_python(),eliminate_trials = eliminate_trials,\\\n",
    "                                                 trial_length = trial_length)\n",
    "    all_trials_apic,trial_length_dt = trace_to_trials(results_all['v_vec_apic'].to_python(),eliminate_trials = eliminate_trials,\\\n",
    "                                                 trial_length = trial_length)\n",
    "    all_trials_apic_up,trial_length_dt = trace_to_trials(results_all['v_vec_apic_up'].to_python(),eliminate_trials = eliminate_trials,\\\n",
    "                                                 trial_length = trial_length)\n",
    "    all_trials_axon,trial_length_dt = trace_to_trials(results_all['v_vec_axon'].to_python(),eliminate_trials = eliminate_trials,\\\n",
    "                                                 trial_length = trial_length)\n",
    "\n",
    "    all_runs = all_runs+all_trials\n",
    "    all_runs_apic = all_runs_apic+all_trials_apic\n",
    "    all_runs_apic_up = all_runs_apic_up+all_trials_apic_up\n",
    "    all_runs_axon = all_runs_axon+all_trials_axon\n",
    "\n",
    "    if record_conduc:\n",
    "        all_trials_ina,trial_length_dt = trace_to_trials(results_all['ina_vec_apic'].to_python(),eliminate_trials = eliminate_trials,\\\n",
    "                                                 trial_length = trial_length)\n",
    "        all_trials_ik,trial_length_dt = trace_to_trials(results_all['ik_vec_apic'].to_python(),eliminate_trials = eliminate_trials,\\\n",
    "                                                     trial_length = trial_length)\n",
    "        all_trials_m,trial_length_dt = trace_to_trials(results_all['m_vec_apic'].to_python(),eliminate_trials = eliminate_trials,\\\n",
    "                                                     trial_length = trial_length)\n",
    "        all_trials_h,trial_length_dt = trace_to_trials(results_all['h_vec_apic'].to_python(),eliminate_trials = eliminate_trials,\\\n",
    "                                                     trial_length = trial_length)\n",
    "        all_trials_n,trial_length_dt = trace_to_trials(results_all['n_vec_apic'].to_python(),eliminate_trials = eliminate_trials,\\\n",
    "                                                     trial_length = trial_length)\n",
    "    \n",
    "    \n",
    "        all_runs_ina = all_runs_ina+all_trials_ina\n",
    "        all_runs_ik = all_runs_ik+all_trials_ik\n",
    "        all_runs_m = all_runs_m+all_trials_m\n",
    "        all_runs_h = all_runs_h+all_trials_h\n",
    "        all_runs_n= all_runs_n+all_trials_n\n",
    "    \n",
    "all_trials = all_runs\n",
    "\n",
    "elapsed = time.time() - t\n",
    "print(elapsed)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "data_baseline_noise = all_runs_apic\n",
    "data_baseline_noise_apic_up = all_runs_apic_up\n",
    "data_baseline_noise_soma = all_runs\n",
    "data_baseline_noise_axon = all_runs_axon\n",
    "data_baseline_ina = all_runs_ina \n",
    "data_baseline_ik = all_runs_ik \n",
    "data_baseline_m = all_runs_m \n",
    "data_baseline_h = all_runs_h \n",
    "data_baseline_n = all_runs_n\n",
    "\n",
    "spk_times_all = turn_trace_to_spk([np.hstack(all_runs)], min_hight_nspk = -60, min_hight_bspk = -47.5,\\\n",
    "                                  prom_nspk = 3, prom_bspk = 15)\n",
    "\n",
    "peaks_nspk_loc_baseline = spk_times_all['peaks_nspk_all']\n",
    "peaks_bspk_loc_baseline = spk_times_all['peaks_bspk_all']\n",
    "nspk_peak_baseline = [x['peak_heights'] for x in spk_times_all['properties_nspk_all']]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# FI CURVE"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "data_soma = [np.hstack(data_baseline_noise_soma)]\n",
    "\n",
    "# data_soma = [np.hstack(data_baseline_noise_soma+data_cancellation_noise_soma+data_inhibition_noise_soma+\\\n",
    "#                       data_soma_inh_noise_soma+data_soma_can_noise_soma)]\n",
    "\n",
    "spk_times_all = turn_trace_to_spk(data_soma, min_hight_nspk = -60, min_hight_bspk = -47.5,\\\n",
    "                                  prom_nspk = 3, prom_bspk = 15)\n",
    "\n",
    "ns_pk_non_evoking = []\n",
    "ns_loc_non_evoking = []\n",
    "nspk_peaks_pre = []\n",
    "ns_loc_pre_bs = []\n",
    "dt = 0.025\n",
    "nspk_before_bspk = np.round(8/dt)\n",
    "peaks_nspk_loc = spk_times_all['peaks_nspk_all']\n",
    "peaks_bspk_loc = spk_times_all['peaks_bspk_all']\n",
    "nspk_peak = [x['peak_heights'] for x in spk_times_all['properties_nspk_all']]\n",
    "nspk_peaks_all = np.concatenate(nspk_peak)\n",
    "if any(peaks_bspk_loc[0]):\n",
    "    nspk_before_idx_all = []\n",
    "    for jj in range(len(peaks_bspk_loc[0])):\n",
    "        nspk_before_idx = np.concatenate(np.where((peaks_nspk_loc[0] > (peaks_bspk_loc[0][jj] - nspk_before_bspk)) & (peaks_nspk_loc[0] < peaks_bspk_loc[0][jj])))\n",
    "        if len(nspk_before_idx) > 0:\n",
    "            nspk_before_idx = nspk_before_idx[-1]\n",
    "            nspk_before_idx_all.append(nspk_before_idx)\n",
    "idx_all = range(len(nspk_peak[0]))\n",
    "nspk_non_before_idx = np.delete(idx_all,nspk_before_idx_all) \n",
    "nspk_peaks_pre.append(nspk_peak[0][nspk_before_idx_all])\n",
    "ns_loc_pre_bs.append(peaks_nspk_loc[0][nspk_before_idx_all])\n",
    "ns_pk_non_evoking.append(nspk_peak[0][nspk_non_before_idx])    \n",
    "ns_loc_non_evoking.append(peaks_nspk_loc[0][nspk_non_before_idx])  \n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/var/folders/lj/190zt78d6zgbjnk6n21cfk180000gn/T/ipykernel_82419/1753302847.py:6: RuntimeWarning: invalid value encountered in true_divide\n",
      "  r_bspk_pk = counts_pre/counts_all\n"
     ]
    }
   ],
   "source": [
    "\n",
    "bin_size = 0.1\n",
    "nbins = np.ceil((np.max(nspk_peaks_all)-np.min(nspk_peaks_all))/bin_size)\n",
    "edges = np.linspace(np.min(nspk_peaks_all),np.max(nspk_peaks_all),num = int(nbins+1))\n",
    "counts_all, bins_all = np.histogram(np.array(nspk_peaks_all,dtype='float'),bins=edges)\n",
    "counts_pre, bins_pre = np.histogram(np.array(nspk_peaks_pre,dtype='float'),bins=edges)\n",
    "r_bspk_pk = counts_pre/counts_all\n",
    "# r_bspk_pk[np.isnan(r_bspk_pk )] = 0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "%matplotlib osx\n",
    "fig = plt.figure(figsize=(3*1.1,3*2))\n",
    "fs = 6\n",
    "ts = 6\n",
    "plt.rc('xtick',labelsize=fs)\n",
    "plt.rc('ytick',labelsize=fs)\n",
    "\n",
    "plt.plot(edges[1:],r_bspk_pk,'ok',markersize = 2)\n",
    "plt.xlabel('peak potential (mV)',fontsize = fs)\n",
    "plt.ylabel('p[broad spike]',fontsize = fs)\n",
    "\n",
    "ax = plt.gca()\n",
    "ax.tick_params(direction='in', length=2, width=0.5, colors='k',\n",
    "               grid_color='k', grid_alpha=1)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# AMP analysis"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "dt = 0.025\n",
    "time_before_pk = int(np.round(0.7/dt))\n",
    "min_der = 0.08\n",
    "add_ind = 1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# %matplotlib osx\n",
    "\n",
    "fs = 6\n",
    "ts = 6\n",
    "plt.rc('xtick',labelsize=fs)\n",
    "plt.rc('ytick',labelsize=fs)\n",
    "\n",
    "nr_hist = 200\n",
    "x_range = [-65,-52]\n",
    "y_range = [0,0.15]\n",
    "\n",
    "\n",
    "titles = ['baseline','inhib.','cancel.','opposite','oppos_can']\n",
    "sites = ['soma','apic','apic up']\n",
    "colr = ['k','r','m']\n",
    "\n",
    "dt = 0.025\n",
    "loc_up_apic = 0.12/dt\n",
    "loc_up_apic_up = 0.25/dt\n",
    "base_nspk = int(np.round(0.675/dt))\n",
    "\n",
    "\n",
    "dt = 0.025\n",
    "nspk_before_bspk = np.round(8/dt)\n",
    "bin_size = 0.1\n",
    "min_ind = 1\n",
    "\n",
    "slope_intercept_all = np.zeros((3,2))\n",
    "mean_AMP_MP_all = np.zeros((3,2))\n",
    "mean_AMP_evoke_non_all = np.zeros((3,2))\n",
    "\n",
    "use_basil_inh = True\n",
    "use_soma_inh = False\n",
    "use_apic_inh = False\n",
    "\n",
    "show_apic = False\n",
    "fig = plt.figure(figsize=(2,2))\n",
    "\n",
    "MP_all = np.empty(0)\n",
    "AMP_all = np.empty(0)\n",
    "clr_all = np.empty(0)\n",
    "\n",
    "for kk in range(1):\n",
    "    \n",
    "    if kk == 0:\n",
    "        data_apic_up = [np.hstack(data_baseline_noise_apic_up)]\n",
    "        data_soma = [np.hstack(data_baseline_noise_soma)]\n",
    "        peaks_nspk_loc = peaks_nspk_loc_baseline\n",
    "        peaks_bspk_loc = peaks_bspk_loc_baseline\n",
    "        nspk_peak = nspk_peak_baseline\n",
    "    if kk>0:\n",
    "        if use_basil_inh:    \n",
    "            if kk == 1: \n",
    "                data_apic_up = [np.hstack(data_inhibition_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_inhibition_noise_soma)]  \n",
    "                peaks_nspk_loc = peaks_nspk_loc_inhibition\n",
    "                peaks_bspk_loc = peaks_bspk_loc_inhibition\n",
    "                nspk_peak = nspk_peak_inhibition\n",
    "            elif kk == 2: \n",
    "                data_apic_up = [np.hstack(data_cancellation_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_cancellation_noise_soma)]\n",
    "                peaks_nspk_loc = peaks_nspk_loc_cancellation\n",
    "                peaks_bspk_loc = peaks_bspk_loc_cancellation\n",
    "                nspk_peak = nspk_peak_cancellation\n",
    "        elif use_apic_inh:    \n",
    "            if kk == 1: \n",
    "                data_apic_up = [np.hstack(data_apic_inh_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_apic_inh_noise_soma)]  \n",
    "                peaks_nspk_loc = peaks_nspk_loc_apic_inh\n",
    "                peaks_bspk_loc = peaks_bspk_loc_apic_inh\n",
    "                nspk_peak = nspk_peak_apic_inh\n",
    "            elif kk == 2: \n",
    "                data_apic_up = [np.hstack(data_apic_can_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_apic_can_noise_soma)]\n",
    "                peaks_nspk_loc = peaks_nspk_loc_apic_can\n",
    "                peaks_bspk_loc = peaks_bspk_loc_apic_can\n",
    "                nspk_peak = nspk_peak_apic_can\n",
    "        elif use_soma_inh:    \n",
    "            if kk == 1: \n",
    "                data_apic_up = [np.hstack(data_soma_inh_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_soma_inh_noise_soma)]  \n",
    "                peaks_nspk_loc = peaks_nspk_loc_soma_inh\n",
    "                peaks_bspk_loc = peaks_bspk_loc_soma_inh\n",
    "                nspk_peak = nspk_peak_soma_inh\n",
    "            elif kk == 2: \n",
    "                data_apic_up = [np.hstack(data_soma_can_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_soma_can_noise_soma)]\n",
    "                peaks_nspk_loc = peaks_nspk_loc_soma_can\n",
    "                peaks_bspk_loc = peaks_bspk_loc_soma_can\n",
    "                nspk_peak = nspk_peak_soma_can\n",
    "\n",
    "        \n",
    "        \n",
    "    if show_apic:\n",
    "        if kk == 0 or kk == 1:\n",
    "            min_pk_nsp = -60\n",
    "        elif kk == 2:\n",
    "            min_pk_nsp = -58.5\n",
    "            \n",
    "        spk_times_all = turn_trace_to_spk(data_apic_up, min_hight_nspk = min_pk_nsp, min_hight_bspk = -47.5,\\\n",
    "                                          prom_nspk = 1.5, prom_bspk = 15)\n",
    "        peaks_nspk_loc_apic_up = np.concatenate(spk_times_all['peaks_nspk_all'])\n",
    "        peaks_bspk_loc = (spk_times_all['peaks_bspk_all'])\n",
    "        nspk_peaks_all_apic_up = np.concatenate([x['peak_heights'] for x in spk_times_all['properties_nspk_all']])\n",
    "        bspk_peaks_all = ([x['peak_heights'] for x in spk_times_all['properties_bspk_all']])\n",
    "        \n",
    "        data_rel = np.concatenate(data_apic_up)\n",
    "        PKS = nspk_peaks_all_apic_up\n",
    "        peaks_loc_rel = peaks_nspk_loc_apic_up    \n",
    "    \n",
    "    else:\n",
    "        data_rel = np.concatenate(data_soma)\n",
    "                \n",
    "        PKS = np.concatenate(nspk_peak) \n",
    "        peaks_loc_rel = np.concatenate(peaks_nspk_loc)\n",
    "        \n",
    "       \n",
    "\n",
    "    fst_dev = np.diff(data_rel,1)\n",
    "    fst_dev = np.insert(fst_dev,0, 0)\n",
    "\n",
    "    mp_nspk_locs = []\n",
    "    idx_list = []\n",
    "    for ii in range(len(peaks_loc_rel)):  \n",
    "        rel_idx = [peaks_loc_rel[ii]-time_before_pk+add_ind+np.where(fst_dev[peaks_loc_rel[ii]-time_before_pk:peaks_loc_rel[ii]]>min_der)[0]]\n",
    "        if len(rel_idx[0])>0:\n",
    "            mp_nspk_locs.append(rel_idx[0][0])\n",
    "        else:\n",
    "            print(ii)\n",
    "            idx_list.append(ii)\n",
    "    PKS = np.delete(PKS,idx_list)  \n",
    "    peaks_loc_rel = np.delete(peaks_loc_rel,idx_list)  \n",
    "    \n",
    "    MP = data_rel[mp_nspk_locs]\n",
    "    AMP = PKS-MP\n",
    "    \n",
    "    mean_AMP_MP_all[kk,0] = np.mean(PKS)\n",
    "    mean_AMP_MP_all[kk,1] = np.mean(MP)\n",
    "\n",
    "    ns_pk_non_evoking = []\n",
    "    ns_mp_non_evoking = []\n",
    "    ns_amp_non_evoking = []\n",
    "    ns_loc_non_evoking = []\n",
    "    ns_pk_pre = []\n",
    "    ns_mp_pre = []\n",
    "    ns_amp_pre = []\n",
    "    ns_loc_pre_bs = []\n",
    "    if any(peaks_bspk_loc[0]):\n",
    "        nspk_before_idx_all = []\n",
    "        for jj in range(len(peaks_bspk_loc[0])):\n",
    "            nspk_before_idx = np.concatenate(np.where((peaks_loc_rel > (peaks_bspk_loc[0][jj] - nspk_before_bspk)) & (peaks_loc_rel < peaks_bspk_loc[0][jj])))\n",
    "            if len(nspk_before_idx) > 0:\n",
    "                nspk_before_idx = nspk_before_idx[-1]\n",
    "                nspk_before_idx_all.append(nspk_before_idx)\n",
    "        idx_all = range(len(PKS))\n",
    "        nspk_non_before_idx = np.delete(idx_all,nspk_before_idx_all) \n",
    "        ns_pk_pre.append(PKS[nspk_before_idx_all])\n",
    "        ns_loc_pre_bs.append(peaks_loc_rel[nspk_before_idx_all])\n",
    "        ns_pk_non_evoking.append(PKS[nspk_non_before_idx])    \n",
    "        ns_loc_non_evoking.append(peaks_loc_rel[nspk_non_before_idx])  \n",
    "\n",
    "        ns_mp_pre.append(MP[nspk_before_idx_all])\n",
    "        ns_mp_non_evoking.append(MP[nspk_non_before_idx])    \n",
    "        ns_amp_pre.append(AMP[nspk_before_idx_all])\n",
    "        ns_amp_non_evoking.append(AMP[nspk_non_before_idx])\n",
    "        \n",
    "\n",
    "        ns_mp_pre_conc = np.concatenate(ns_mp_pre);\n",
    "        ns_amp_pre_conc = np.concatenate(ns_amp_pre);\n",
    "        ns_mp_non_evoking_conc = np.concatenate(ns_mp_non_evoking)\n",
    "        ns_amp_non_evoking_conc = np.concatenate(ns_amp_non_evoking)\n",
    "\n",
    "        nbins = np.ceil((np.max(MP)-np.min(MP))/bin_size)\n",
    "        edges = np.linspace(np.min(MP),np.max(MP),num = int(nbins+1))\n",
    "        inds_non_evoke = np.digitize(np.array(ns_mp_non_evoking_conc ,dtype='float'), bins=edges)\n",
    "        mean_amp_non_evoke = np.empty((np.size(edges)))\n",
    "        mean_amp_non_evoke[:] = np.NaN\n",
    "        for ii in range(len(edges)):\n",
    "            if np.sum(inds_non_evoke == ii) > min_ind:\n",
    "                mean_amp_non_evoke[ii] =  np.mean(ns_amp_non_evoking_conc[inds_non_evoke == ii])                  \n",
    "        inds_evoke = np.digitize(np.array(ns_mp_pre_conc ,dtype='float'), bins=edges)\n",
    "        mean_amp_evoke = np.empty((np.size(edges)))\n",
    "        mean_amp_evoke[:] = np.NaN\n",
    "        for ii in range(len(edges)):\n",
    "            if np.sum(inds_evoke == ii) > min_ind:\n",
    "                mean_amp_evoke[ii] =  np.mean(ns_amp_pre_conc[inds_evoke == ii])                  \n",
    "        \n",
    "        mean_AMP_evoke_non_all[kk,0] = np.mean(ns_amp_pre)\n",
    "        mean_AMP_evoke_non_all[kk,1] = np.mean(ns_amp_non_evoking)\n",
    "    else:\n",
    "        nbins = np.ceil((np.max(MP)-np.min(MP))/bin_size)\n",
    "        edges = np.linspace(np.min(MP),np.max(MP),num = int(nbins+1))\n",
    "        counts_all, bins_all = np.histogram(np.array(MP,dtype='float'),bins=edges)\n",
    "        inds_non_evoke = np.digitize(np.array(MP,dtype='float'), bins=edges)\n",
    "        mean_amp_non_evoke = np.empty((np.size(edges)))\n",
    "        mean_amp_non_evoke[:] = np.NaN\n",
    "        for ii in range(len(edges)):\n",
    "            if np.sum(inds_non_evoke == ii) > min_ind:\n",
    "                mean_amp_non_evoke[ii] =  np.mean(AMP[inds_non_evoke == ii])                  \n",
    "        mean_amp_evoke = np.empty((np.size(edges)))\n",
    "        mean_amp_evoke[:] = np.NaN\n",
    "        ind_evoke = np.empty((np.size(edges)))\n",
    "    \n",
    "    \n",
    "    \n",
    "    X_MP  = np.transpose([MP])\n",
    "    model_cell = LinearRegression(fit_intercept=True).fit(X_MP, np.transpose(AMP))\n",
    "    \n",
    "    plt.scatter(edges[4:54],mean_amp_non_evoke[4:54],s=10, color='k')\n",
    "    plt.scatter(edges[4:54],mean_amp_evoke[4:54],s=10, facecolors='none', edgecolors='k')\n",
    "    plt.legend(['non-evoking','evoking'],frameon = False, prop={'size': fs})\n",
    "    plt.tight_layout()\n",
    "    plt.ylabel('amplitude (mV)', fontsize = fs)\n",
    "    plt.xlabel('baseline membrane potential (mV)', fontsize = fs)\n",
    "\n",
    "    ax = plt.gca()\n",
    "    ax.tick_params(direction='in', length=2, width=0.5, colors='k',\n",
    "                   grid_color='k', grid_alpha=1)\n",
    "\n",
    "plt.show()\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "13.483453810895142\n",
      "12.36405390443785\n"
     ]
    }
   ],
   "source": [
    "print(np.mean(ns_amp_pre))\n",
    "print(np.mean(ns_amp_non_evoking))\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# PEAK vs MP"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "%matplotlib osx\n",
    "\n",
    "fs = 6\n",
    "ts = 6\n",
    "plt.rc('xtick',labelsize=fs)\n",
    "plt.rc('ytick',labelsize=fs)\n",
    "\n",
    "nr_hist = 200\n",
    "x_range = [-65,-52]\n",
    "y_range = [0,0.15]\n",
    "\n",
    "\n",
    "titles = ['baseline','inhib.','cancel.','opposite','oppos_can']\n",
    "sites = ['soma','apic','apic up']\n",
    "colr = ['k','r','m']\n",
    "\n",
    "dt = 0.025\n",
    "loc_up_apic = 0.12/dt\n",
    "loc_up_apic_up = 0.25/dt\n",
    "base_nspk = int(np.round(0.675/dt))\n",
    "\n",
    "\n",
    "dt = 0.025\n",
    "nspk_before_bspk = np.round(8/dt)\n",
    "bin_size = 0.1\n",
    "min_ind = 2\n",
    "\n",
    "slope_intercept_all = np.zeros((3,2))\n",
    "mean_AMP_MP_all = np.zeros((3,2))\n",
    "mean_AMP_evoke_non_all = np.zeros((3,2))\n",
    "\n",
    "use_basil_inh = True\n",
    "use_soma_inh = False\n",
    "use_apic_inh = False\n",
    "\n",
    "show_apic = False\n",
    "fig = plt.figure(figsize=(2,2))\n",
    "\n",
    "MP_all = np.empty(0)\n",
    "AMP_all = np.empty(0)\n",
    "clr_all = np.empty(0)\n",
    "\n",
    "for kk in range(1):\n",
    "    \n",
    "    if kk == 0:\n",
    "        data_apic_up = [np.hstack(data_baseline_noise_apic_up)]\n",
    "        data_soma = [np.hstack(data_baseline_noise_soma)]\n",
    "        peaks_nspk_loc = peaks_nspk_loc_baseline\n",
    "        peaks_bspk_loc = peaks_bspk_loc_baseline\n",
    "        nspk_peak = nspk_peak_baseline\n",
    "    if kk>0:\n",
    "        if use_basil_inh:    \n",
    "            if kk == 1: \n",
    "                data_apic_up = [np.hstack(data_inhibition_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_inhibition_noise_soma)]  \n",
    "                peaks_nspk_loc = peaks_nspk_loc_inhibition\n",
    "                peaks_bspk_loc = peaks_bspk_loc_inhibition\n",
    "                nspk_peak = nspk_peak_inhibition\n",
    "            elif kk == 2: \n",
    "                data_apic_up = [np.hstack(data_cancellation_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_cancellation_noise_soma)]\n",
    "                peaks_nspk_loc = peaks_nspk_loc_cancellation\n",
    "                peaks_bspk_loc = peaks_bspk_loc_cancellation\n",
    "                nspk_peak = nspk_peak_cancellation\n",
    "        elif use_apic_inh:    \n",
    "            if kk == 1: \n",
    "                data_apic_up = [np.hstack(data_apic_inh_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_apic_inh_noise_soma)]  \n",
    "                peaks_nspk_loc = peaks_nspk_loc_apic_inh\n",
    "                peaks_bspk_loc = peaks_bspk_loc_apic_inh\n",
    "                nspk_peak = nspk_peak_apic_inh\n",
    "            elif kk == 2: \n",
    "                data_apic_up = [np.hstack(data_apic_can_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_apic_can_noise_soma)]\n",
    "                peaks_nspk_loc = peaks_nspk_loc_apic_can\n",
    "                peaks_bspk_loc = peaks_bspk_loc_apic_can\n",
    "                nspk_peak = nspk_peak_apic_can\n",
    "        elif use_soma_inh:    \n",
    "            if kk == 1: \n",
    "                data_apic_up = [np.hstack(data_soma_inh_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_soma_inh_noise_soma)]  \n",
    "                peaks_nspk_loc = peaks_nspk_loc_soma_inh\n",
    "                peaks_bspk_loc = peaks_bspk_loc_soma_inh\n",
    "                nspk_peak = nspk_peak_soma_inh\n",
    "            elif kk == 2: \n",
    "                data_apic_up = [np.hstack(data_soma_can_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_soma_can_noise_soma)]\n",
    "                peaks_nspk_loc = peaks_nspk_loc_soma_can\n",
    "                peaks_bspk_loc = peaks_bspk_loc_soma_can\n",
    "                nspk_peak = nspk_peak_soma_can\n",
    "\n",
    "        \n",
    "        \n",
    "    if show_apic:\n",
    "        if kk == 0 or kk == 1:\n",
    "            min_pk_nsp = -60\n",
    "        elif kk == 2:\n",
    "            min_pk_nsp = -58.5\n",
    "            \n",
    "        spk_times_all = turn_trace_to_spk(data_apic_up, min_hight_nspk = min_pk_nsp, min_hight_bspk = -47.5,\\\n",
    "                                          prom_nspk = 1.5, prom_bspk = 15)\n",
    "        peaks_nspk_loc_apic_up = np.concatenate(spk_times_all['peaks_nspk_all'])\n",
    "        peaks_bspk_loc = (spk_times_all['peaks_bspk_all'])\n",
    "        nspk_peaks_all_apic_up = np.concatenate([x['peak_heights'] for x in spk_times_all['properties_nspk_all']])\n",
    "        bspk_peaks_all = ([x['peak_heights'] for x in spk_times_all['properties_bspk_all']])\n",
    "        \n",
    "        data_rel = np.concatenate(data_apic_up)\n",
    "        PKS = nspk_peaks_all_apic_up\n",
    "        peaks_loc_rel = peaks_nspk_loc_apic_up    \n",
    "    \n",
    "    else:\n",
    "        data_rel = np.concatenate(data_soma)\n",
    "\n",
    "        \n",
    "        PKS = np.concatenate(nspk_peak) \n",
    "        peaks_loc_rel = np.concatenate(peaks_nspk_loc)\n",
    "        \n",
    "       \n",
    "\n",
    "    fst_dev = np.diff(data_rel,1)\n",
    "    fst_dev = np.insert(fst_dev,0, 0)\n",
    "\n",
    "    mp_nspk_locs = []\n",
    "    idx_list = []\n",
    "    for ii in range(len(peaks_loc_rel)):  \n",
    "        rel_idx = [peaks_loc_rel[ii]-time_before_pk+add_ind+np.where(fst_dev[peaks_loc_rel[ii]-time_before_pk:peaks_loc_rel[ii]]>min_der)[0]]\n",
    "        if len(rel_idx[0])>0:\n",
    "            mp_nspk_locs.append(rel_idx[0][0])\n",
    "        else:\n",
    "            print(ii)\n",
    "            idx_list.append(ii)\n",
    "\n",
    "            PKS = np.delete(PKS,idx_list)  \n",
    "    peaks_loc_rel = np.delete(peaks_loc_rel,idx_list)  \n",
    "\n",
    "    MP = data_rel[mp_nspk_locs]\n",
    "    AMP = PKS-MP\n",
    "    \n",
    "    mean_AMP_MP_all[kk,0] = np.mean(PKS)\n",
    "    mean_AMP_MP_all[kk,1] = np.mean(MP)\n",
    "\n",
    "    ns_pk_non_evoking = []\n",
    "    ns_mp_non_evoking = []\n",
    "    ns_amp_non_evoking = []\n",
    "    ns_loc_non_evoking = []\n",
    "    ns_pk_pre = []\n",
    "    ns_mp_pre = []\n",
    "    ns_amp_pre = []\n",
    "    ns_loc_pre_bs = []\n",
    "    if any(peaks_bspk_loc[0]):\n",
    "        nspk_before_idx_all = []\n",
    "        for jj in range(len(peaks_bspk_loc[0])):\n",
    "            nspk_before_idx = np.concatenate(np.where((peaks_loc_rel > (peaks_bspk_loc[0][jj] - nspk_before_bspk)) & (peaks_loc_rel < peaks_bspk_loc[0][jj])))\n",
    "            if len(nspk_before_idx) > 0:\n",
    "                nspk_before_idx = nspk_before_idx[-1]\n",
    "                nspk_before_idx_all.append(nspk_before_idx)\n",
    "        idx_all = range(len(PKS))\n",
    "        nspk_non_before_idx = np.delete(idx_all,nspk_before_idx_all) \n",
    "        ns_pk_pre.append(PKS[nspk_before_idx_all])\n",
    "        ns_loc_pre_bs.append(peaks_loc_rel[nspk_before_idx_all])\n",
    "        ns_pk_non_evoking.append(PKS[nspk_non_before_idx])    \n",
    "        ns_loc_non_evoking.append(peaks_loc_rel[nspk_non_before_idx])  \n",
    "\n",
    "        ns_mp_pre.append(MP[nspk_before_idx_all])\n",
    "        ns_mp_non_evoking.append(MP[nspk_non_before_idx])    \n",
    "        ns_amp_pre.append(AMP[nspk_before_idx_all])\n",
    "        ns_amp_non_evoking.append(AMP[nspk_non_before_idx])        \n",
    "\n",
    "        ns_mp_pre_conc = np.concatenate(ns_mp_pre);\n",
    "        ns_amp_pre_conc = np.concatenate(ns_amp_pre);\n",
    "        ns_pk_pre_conc = np.concatenate(ns_pk_pre);\n",
    "        ns_mp_non_evoking_conc = np.concatenate(ns_mp_non_evoking)\n",
    "        ns_amp_non_evoking_conc = np.concatenate(ns_amp_non_evoking)\n",
    "        ns_pk_non_evoking_conc = np.concatenate(ns_pk_non_evoking)\n",
    "\n",
    "        nbins = np.ceil((np.max(MP)-np.min(MP))/bin_size)\n",
    "        edges = np.linspace(np.min(MP),np.max(MP),num = int(nbins+1))\n",
    "        inds_non_evoke = np.digitize(np.array(ns_mp_non_evoking_conc ,dtype='float'), bins=edges)\n",
    "        mean_pk_non_evoke = np.empty((np.size(edges)))\n",
    "        mean_pk_non_evoke[:] = np.NaN\n",
    "        for ii in range(len(edges)):\n",
    "            if np.sum(inds_non_evoke == ii) > min_ind:\n",
    "                mean_pk_non_evoke[ii] =  np.mean(ns_pk_non_evoking_conc[inds_non_evoke == ii])                  \n",
    "        inds_evoke = np.digitize(np.array(ns_mp_pre_conc ,dtype='float'), bins=edges)\n",
    "        mean_pk_evoke = np.empty((np.size(edges)))\n",
    "        mean_pk_evoke[:] = np.NaN\n",
    "        for ii in range(len(edges)):\n",
    "            if np.sum(inds_evoke == ii) > min_ind:\n",
    "                mean_pk_evoke[ii] =  np.mean(ns_pk_pre_conc[inds_evoke == ii])                  \n",
    "        \n",
    "    else:\n",
    "        nbins = np.ceil((np.max(MP)-np.min(MP))/bin_size)\n",
    "        edges = np.linspace(np.min(MP),np.max(MP),num = int(nbins+1))\n",
    "        counts_all, bins_all = np.histogram(np.array(MP,dtype='float'),bins=edges)\n",
    "        inds_non_evoke = np.digitize(np.array(MP,dtype='float'), bins=edges)\n",
    "        mean_pk_non_evoke = np.empty((np.size(edges)))\n",
    "        mean_pk_non_evoke[:] = np.NaN\n",
    "        for ii in range(len(edges)):\n",
    "            if np.sum(inds_non_evoke == ii) > min_ind:\n",
    "                mean_pk_non_evoke[ii] =  np.mean(PKS[inds_non_evoke == ii])                  \n",
    "        mean_pk_evoke = np.empty((np.size(edges)))\n",
    "        mean_pk_evoke[:] = np.NaN\n",
    "        ind_evoke = np.empty((np.size(edges)))\n",
    "    \n",
    "    \n",
    "    \n",
    "    X_MP  = np.transpose([MP])\n",
    "    model_cell = LinearRegression(fit_intercept=True).fit(X_MP, np.transpose(PKS))\n",
    "    \n",
    "    plt.scatter(edges[4:54],mean_pk_non_evoke[4:54],s=10, color='k')\n",
    "    plt.scatter(edges[4:54],mean_pk_evoke[4:54],s=10, facecolors='none', edgecolors='k')\n",
    "    plt.legend(['non-evoking','evoking'],frameon = False, prop={'size': fs})\n",
    "    plt.tight_layout()\n",
    "    plt.ylabel('peak (mV)', fontsize = fs)\n",
    "    plt.xlabel('baseline membrane potential (mV)', fontsize = fs)\n",
    "\n",
    "    ax = plt.gca()\n",
    "    ax.tick_params(direction='in', length=2, width=0.5, colors='k',\n",
    "                   grid_color='k', grid_alpha=1)\n",
    "\n",
    "plt.show()\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# H-H analysis"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "nr nspk =  2555.0\n",
      "nr bspk =  108.0\n"
     ]
    }
   ],
   "source": [
    "percentage_evoking_ramp = np.zeros(3)\n",
    "\n",
    "mean_peak = np.zeros((3,3))\n",
    "std_peak_all = np.zeros((3,3))\n",
    "\n",
    "mean_peak_evoking = np.zeros((3,3))\n",
    "std_peak_evoking = np.zeros((3,3))\n",
    "\n",
    "mean_peak_non_ramp = np.zeros((3,3))\n",
    "std_peak_non_ramp = np.zeros((3,3))\n",
    "\n",
    "titles = ['baseline','cancellation','inhibition']\n",
    "thresh_val_all = np.zeros(3)\n",
    "thresh_val_all_apic_up = np.zeros(3)\n",
    "\n",
    "p_thresh_all = np.zeros(3)\n",
    "p_thresh_all_apic_up = np.zeros(3)\n",
    "ratio_all = np.zeros(3)\n",
    "nr_nspk = np.zeros(3)\n",
    "nr_bspk = np.zeros(3)\n",
    "\n",
    "dt = 0.025\n",
    "loc_up_apic_up = 0.65/dt\n",
    "\n",
    "for kk in range(1):\n",
    "\n",
    "    if kk == 0:\n",
    "        data_apic = [np.hstack(data_baseline_noise)]\n",
    "        data_apic_up = [np.hstack(data_baseline_noise_apic_up)]\n",
    "        data_soma = [np.hstack(data_baseline_noise_soma)]\n",
    "        data_ina = [np.hstack(data_baseline_ina)] \n",
    "        data_ik = [np.hstack(data_baseline_ik)] \n",
    "        data_m = [np.hstack(data_baseline_m)] \n",
    "        data_h = [np.hstack(data_baseline_h)] \n",
    "        data_n = [np.hstack(data_baseline_n)]\n",
    "\n",
    "    elif kk == 1:    \n",
    "        data_apic = [np.hstack(data_cancellation_noise)]\n",
    "        data_apic_up = [np.hstack(data_cancellation_noise_apic_up)]\n",
    "        data_soma = [np.hstack(data_cancellation_noise_soma)]\n",
    "        data_ina = [np.hstack(data_cancellation_ina)] \n",
    "        data_ik = [np.hstack(data_cancellation_ik)] \n",
    "        data_m = [np.hstack(data_cancellation_m)] \n",
    "        data_h = [np.hstack(data_cancellation_h)] \n",
    "        data_n = [np.hstack(data_cancellation_n)]\n",
    "    elif kk == 2:    \n",
    "        data_apic = [np.hstack(data_inhibition_noise)]\n",
    "        data_apic_up = [np.hstack(data_inhibition_noise_apic_up)]\n",
    "        data_soma = [np.hstack(data_inhibition_noise_soma)]\n",
    "        data_ina = [np.hstack(data_inhibition_ina)] \n",
    "        data_ik = [np.hstack(data_inhibition_ik)] \n",
    "        data_m = [np.hstack(data_inhibition_m)] \n",
    "        data_h = [np.hstack(data_inhibition_h)] \n",
    "        data_n = [np.hstack(data_inhibition_n)]\n",
    "        \n",
    "    spk_times_all = turn_trace_to_spk(data_soma, min_hight_nspk = -59, min_hight_bspk = -47.5,\\\n",
    "                                      prom_nspk = 3, prom_bspk = 15)\n",
    "    \n",
    "    nr_bspk_two_nspk = 0\n",
    "    \n",
    "    ns_pk_non_evoking = []\n",
    "    ns_loc_non_evoking = []\n",
    "    nspk_peaks_pre = []\n",
    "    ns_loc_pre_bs = []\n",
    "    dt = 0.025\n",
    "    nspk_before_bspk = np.round(8/dt)\n",
    "    peaks_nspk_loc = spk_times_all['peaks_nspk_all']\n",
    "    peaks_bspk_loc = spk_times_all['peaks_bspk_all']\n",
    "    nspk_peak = [x['peak_heights'] for x in spk_times_all['properties_nspk_all']]\n",
    "    nspk_peaks_all = np.concatenate(nspk_peak)\n",
    "    \n",
    "    peaks_nspk_loc = np.concatenate(peaks_nspk_loc)\n",
    "    \n",
    "    fst_dev = np.diff(np.concatenate(data_apic_up),1)\n",
    "    fst_dev = np.insert(fst_dev,0, 0)\n",
    "\n",
    "    mp_nspk_locs = []\n",
    "    idx_list = []\n",
    "    for ii in range(len(peaks_nspk_loc)):  \n",
    "        rel_idx = [peaks_nspk_loc[ii]-time_before_pk+add_ind+np.where(fst_dev[peaks_nspk_loc[ii]-time_before_pk:peaks_nspk_loc[ii]]>min_der)[0]]\n",
    "        if len(rel_idx[0])>0:\n",
    "            mp_nspk_locs.append(rel_idx[0][0])\n",
    "        else:\n",
    "            print(ii)\n",
    "            idx_list.append(ii)\n",
    "#         \n",
    "    nspk_peaks_all = np.delete(nspk_peaks_all,idx_list)  \n",
    "    peaks_nspk_loc = np.delete(peaks_nspk_loc,idx_list)  \n",
    "        \n",
    "#     mp_nspk_loc = np.array([peaks_nspk_loc[ii]-time_before_pk-1+np.where(fst_dev[peaks_nspk_loc[ii]-time_before_pk:peaks_nspk_loc[ii]]>min_der)[0][0] for ii in range(len(peaks_nspk_loc))],dtype=int)\n",
    "    MP = data_apic_up[0][mp_nspk_locs]\n",
    "    AMP = nspk_peaks_all-MP\n",
    " \n",
    "    \n",
    "    if any(peaks_bspk_loc[0]):\n",
    "        nspk_before_idx_all = []\n",
    "        for jj in range(len(peaks_bspk_loc[0])):\n",
    "            nspk_before_idx = np.concatenate(np.where((peaks_nspk_loc > (peaks_bspk_loc[0][jj] - nspk_before_bspk)) & (peaks_nspk_loc < peaks_bspk_loc[0][jj])))\n",
    "            if len(nspk_before_idx) > 0:\n",
    "                if len(nspk_before_idx) > 1:\n",
    "                    nr_bspk_two_nspk = nr_bspk_two_nspk+1    \n",
    "                nspk_before_idx = nspk_before_idx[-1]\n",
    "                nspk_before_idx_all.append(nspk_before_idx)\n",
    "            else:\n",
    "                print('bad')\n",
    "\n",
    "    idx_all = range(len(nspk_peaks_all))\n",
    "    nspk_non_before_idx = np.delete(idx_all,nspk_before_idx_all) \n",
    "    nspk_peaks_pre.append(nspk_peak[0][nspk_before_idx_all])\n",
    "    ns_loc_pre_bs.append(peaks_nspk_loc[nspk_before_idx_all])\n",
    "    ns_pk_non_evoking.append(nspk_peak[0][nspk_non_before_idx])    \n",
    "    ns_loc_non_evoking.append(peaks_nspk_loc[nspk_non_before_idx])  \n",
    "            \n",
    "                     \n",
    "\n",
    "    nr_nspk[kk] = len(peaks_nspk_loc)\n",
    "    print('nr nspk = ', nr_nspk[kk])\n",
    "    nr_bspk[kk] = len(np.concatenate(peaks_bspk_loc))\n",
    "    print('nr bspk = ', nr_bspk[kk])\n",
    "    \n",
    "    thresh = 0.1\n",
    "    \n",
    "    thresh_val_all[kk] = np.quantile(nspk_peaks_pre,thresh)\n",
    "\n",
    "    above_thresh_idx = np.where(nspk_peaks_all > thresh_val_all[kk])\n",
    "    evoke_idx = nspk_before_idx_all\n",
    "#     evoke_idx = np.intersect1d(above_thresh_idx,nspk_before_idx_all,assume_unique=True)\n",
    "    non_evoke_idx = np.setdiff1d(above_thresh_idx,nspk_before_idx_all, assume_unique=True)\n",
    "    \n",
    "#     evoke_idx = np.asarray(nspk_before_idx_all)\n",
    "#     non_evoke_idx = nspk_non_before_idx\n",
    "\n",
    "    peak_evoke = nspk_peaks_all[evoke_idx]\n",
    "    peak_non_evoke = nspk_peaks_all[non_evoke_idx]\n",
    "    peak_above_thresh = nspk_peaks_all[above_thresh_idx]\n",
    "\n",
    "    amp_evoke = AMP[evoke_idx]\n",
    "    amp_non_evoke = AMP[non_evoke_idx]\n",
    "    amp_above_thresh = AMP[above_thresh_idx]\n",
    "\n",
    "    ina_peaks_all = data_ina[0][peaks_nspk_loc+int(loc_up_apic_up)]\n",
    "    ik_peaks_all = data_ik[0][peaks_nspk_loc+int(loc_up_apic_up)]\n",
    "    m_peaks_all = data_m[0][peaks_nspk_loc+int(loc_up_apic_up)]\n",
    "    h_peaks_all = data_h[0][peaks_nspk_loc+int(loc_up_apic_up)]\n",
    "    n_peaks_all = data_n[0][peaks_nspk_loc+int(loc_up_apic_up)]\n",
    "\n",
    "    ina_evoke = ina_peaks_all[evoke_idx]\n",
    "    ina_non_evoke = ina_peaks_all[non_evoke_idx]    \n",
    "    ik_evoke = ik_peaks_all[evoke_idx]\n",
    "    ik_non_evoke = ik_peaks_all[non_evoke_idx]\n",
    "    m_evoke = m_peaks_all[evoke_idx]\n",
    "    m_non_evoke = m_peaks_all[non_evoke_idx]\n",
    "    m_above_thresh = m_peaks_all[above_thresh_idx]\n",
    "    h_evoke = h_peaks_all[evoke_idx]\n",
    "    h_non_evoke = h_peaks_all[non_evoke_idx]\n",
    "    h_above_thresh = h_peaks_all[above_thresh_idx]\n",
    "    n_evoke = n_peaks_all[evoke_idx]\n",
    "    n_non_evoke = n_peaks_all[non_evoke_idx]\n",
    "    n_above_thresh = n_peaks_all[above_thresh_idx]\n",
    "\n",
    "    ### fits\n",
    "    \n",
    "    evoke = np.column_stack((m_evoke,h_evoke))\n",
    "    a = np.ones(len(m_evoke),dtype=int)\n",
    "    non_evoke = np.column_stack((m_non_evoke,h_non_evoke))\n",
    "    b = np.zeros(len(m_non_evoke),dtype=int)\n",
    "    all_mh = np.concatenate((evoke,non_evoke))\n",
    "    all_clas = np.concatenate((a,b))\n",
    "    \n",
    "    X = all_mh\n",
    "    y = all_clas\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "metadata": {},
   "outputs": [],
   "source": [
    "slope_pk = np.zeros(2)\n",
    "slope_amp = np.zeros(2)\n",
    "\n",
    "%matplotlib osx\n",
    "\n",
    "fig = plt.figure(figsize=(3,3))\n",
    "\n",
    "fs = 6\n",
    "ts = 6\n",
    "plt.rc('xtick',labelsize=fs)\n",
    "plt.rc('ytick',labelsize=fs)\n",
    "\n",
    "x_range = [-62,-45]\n",
    "y_range = [0,0.15]\n",
    "if kk == 0:\n",
    "    x_min, x_max = X[:, 0].min() - .01, X[:, 0].max() + .01\n",
    "    y_min, y_max = X[:, 1].min() - .01, X[:, 1].max() + .03\n",
    "    h = .02 # step size\n",
    "    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n",
    "                         np.arange(y_min, y_max, h))\n",
    "\n",
    "if kk == 0:    \n",
    "    clf = SVC(gamma='scale',C=1000)\n",
    "    clf.fit(X, y)\n",
    "    Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])\n",
    "    Z = Z.reshape(xx.shape)\n",
    "\n",
    "cm1 = plt.cm.RdBu\n",
    "cm_bright = ListedColormap(['#FF0000', '#0000FF'])\n",
    "\n",
    "score = clf.score(X, y)\n",
    "ax = fig.add_subplot(1,1,1)\n",
    "ms = 3\n",
    "ax.contourf(xx, yy, Z, cmap=cm1, alpha=.8)  \n",
    "perc_use = 0.3\n",
    "non_evoke_idx = np.random.randint(0,len(m_non_evoke),int(perc_use*len(m_non_evoke)))\n",
    "evoke_idx = np.random.randint(0,len(m_evoke),int(perc_use*len(m_evoke)))\n",
    "\n",
    "plt.scatter(m_non_evoke[non_evoke_idx],h_non_evoke[non_evoke_idx], facecolor = 'gray',marker = 'x',s = ms)\n",
    "plt.scatter(m_evoke[evoke_idx],h_evoke[evoke_idx],marker = 'o',s = ms-1,facecolors='none', edgecolors='r')\n",
    "ax.text(xx.max() - .01, yy.min() + .02, ('%.2f' % score).lstrip('0'),\n",
    "        size=fs, horizontalalignment='right')\n",
    "\n",
    "\n",
    "ax.set_xlim(xx.min(), xx.max())\n",
    "ax.set_ylim(yy.min(), yy.max())\n",
    "\n",
    "plt.title(titles[kk], fontsize = fs)\n",
    "ax.set_xlabel('m', fontsize = fs)\n",
    "ax.set_ylabel('h', fontsize = fs)\n",
    "ax = plt.gca()\n",
    "ax.tick_params(direction='in', length=2, width=0.5, colors='k',\n",
    "               grid_color='k', grid_alpha=1)\n",
    "\n",
    "data1 = peak_above_thresh\n",
    "data2 = m_above_thresh\n",
    "X_M  = np.transpose([data1])\n",
    "model_cell = LinearRegression(fit_intercept=True).fit(X_M, np.transpose(data2))\n",
    "slope_pk[0] = model_cell.coef_[0]\n",
    "\n",
    "data1 = amp_above_thresh\n",
    "data2 = m_above_thresh\n",
    "X_M  = np.transpose([data1])\n",
    "model_cell = LinearRegression(fit_intercept=True).fit(X_M, np.transpose(data2))\n",
    "slope_amp[0] = model_cell.coef_[0]\n",
    "\n",
    "data1 = peak_above_thresh\n",
    "data2 = h_above_thresh\n",
    "X_M  = np.transpose([data1])\n",
    "model_cell = LinearRegression(fit_intercept=True).fit(X_M, np.transpose(data2))\n",
    "slope_pk[1] = model_cell.coef_[0]\n",
    "\n",
    "data1 = amp_above_thresh\n",
    "data2 = h_above_thresh\n",
    "X_M  = np.transpose([data1])\n",
    "model_cell = LinearRegression(fit_intercept=True).fit(X_M, np.transpose(data2))\n",
    "slope_amp[1] = model_cell.coef_[0]\n"
   ]
  },
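  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The cell above fits an RBF-kernel `SVC` to the (m, h) pairs, evaluates its decision function on a meshgrid, and overlays a subsample of the evoked and non-evoked points, with the training accuracy printed in the corner of the panel. For readers unfamiliar with that plotting pattern, the cell below is a minimal, self-contained sketch of the same idea on synthetic two-class data; the data and parameter values are illustrative only and are not outputs of the model."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Minimal sketch of the decision-function-on-a-meshgrid pattern used above,\n",
    "# on synthetic two-class data (illustrative only; relies on the notebook imports).\n",
    "X_demo = np.vstack((np.random.normal([0, 0], 0.5, (100, 2)),\n",
    "                    np.random.normal([1.5, 1.5], 0.5, (100, 2))))\n",
    "y_demo = np.concatenate((np.zeros(100, dtype=int), np.ones(100, dtype=int)))\n",
    "\n",
    "clf_demo = SVC(gamma='scale', C=1000).fit(X_demo, y_demo)\n",
    "\n",
    "gx, gy = np.meshgrid(np.arange(X_demo[:, 0].min()-0.5, X_demo[:, 0].max()+0.5, 0.05),\n",
    "                     np.arange(X_demo[:, 1].min()-0.5, X_demo[:, 1].max()+0.5, 0.05))\n",
    "Z_demo = clf_demo.decision_function(np.c_[gx.ravel(), gy.ravel()]).reshape(gx.shape)\n",
    "\n",
    "plt.figure(figsize=(3, 3))\n",
    "plt.contourf(gx, gy, Z_demo, cmap=plt.cm.RdBu, alpha=.8)\n",
    "plt.scatter(X_demo[:, 0], X_demo[:, 1], c=y_demo, cmap=ListedColormap(['#FF0000', '#0000FF']), s=3)\n",
    "plt.title('SVC decision function, synthetic data', fontsize=6)\n",
    "plt.show()"
   ]
  },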
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {},
   "outputs": [],
   "source": [
    "colr = ['c','m']\n",
    "titles = ['m','h']\n",
    "\n",
    "fig = plt.figure(figsize=(6,4))\n",
    "\n",
    "\n",
    "plt.subplot(1,2,1)\n",
    "plt.bar(titles,slope_pk,color=colr,width = 0.5)\n",
    "plt.ylabel('slope, peak', fontsize = fs)\n",
    "ax = plt.gca()\n",
    "ax.fontsize = fs\n",
    "ax.tick_params(direction='in', length=2, width=0.5, colors='k',\n",
    "               grid_color='k', grid_alpha=1)\n",
    "\n",
    " \n",
    "plt.subplot(1,2,2)\n",
    "plt.bar(titles,slope_amp,color=colr,align='center',width = 0.5)\n",
    "plt.ylabel('slope, amplitude', fontsize = fs)\n",
    "ax = plt.gca()\n",
    "ax.fontsize = fs\n",
    "ax.tick_params(direction='in', length=2, width=0.5, colors='k',\n",
    "               grid_color='k', grid_alpha=1)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# attenuation as a function of amplitude"
   ]
  },
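  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The next cell detects nspk peaks in the distal apical trace, matches each somatic spike to an apical peak occurring within 15 samples, measures both amplitudes relative to the membrane potential at the upstroke onset, and then bins the somatic amplitudes (bin width 0.1 mV) to get the mean apical amplitude per bin. The sketch below illustrates just that bin-and-average step on synthetic amplitude pairs; the arrays and the 0.4 attenuation factor are placeholders, not simulation outputs."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch of the bin-and-average step used in the next cell, on synthetic amplitude pairs.\n",
    "# amp_soma_demo / amp_apic_demo are placeholders, not the simulated amplitudes.\n",
    "amp_soma_demo = np.random.uniform(2, 12, 1000)\n",
    "amp_apic_demo = 0.4*amp_soma_demo + np.random.normal(0, 0.3, 1000)\n",
    "\n",
    "bin_size_demo = 0.1\n",
    "nbins_demo = int(np.ceil((amp_soma_demo.max() - amp_soma_demo.min())/bin_size_demo))\n",
    "edges_demo = np.linspace(amp_soma_demo.min(), amp_soma_demo.max(), num=nbins_demo + 1)\n",
    "bin_idx = np.digitize(amp_soma_demo, bins=edges_demo)\n",
    "\n",
    "# mean apical amplitude per somatic-amplitude bin, NaN where a bin has too few samples\n",
    "mean_apic_demo = np.full(edges_demo.size, np.nan)\n",
    "for ii in range(edges_demo.size):\n",
    "    if np.sum(bin_idx == ii) > 1:\n",
    "        mean_apic_demo[ii] = np.mean(amp_apic_demo[bin_idx == ii])"
   ]
  },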
  {
   "cell_type": "code",
   "execution_count": 46,
   "metadata": {},
   "outputs": [],
   "source": [
    "# %matplotlib osx\n",
    "\n",
    "nr_hist = 200\n",
    "x_range = [-65,-52]\n",
    "y_range = [0,0.15]\n",
    "min_ind = 1\n",
    "\n",
    "titles = ['baseline','inhib.','cancel.']\n",
    "sites = ['soma','apic','apic up']\n",
    "colr = ['k','r','m']\n",
    "\n",
    "dt = 0.025\n",
    "peak_fit_all = {}\n",
    "lnspc_peak_fit_all = {}\n",
    "height_fit_all = {}\n",
    "lnspc_height_fit_all = {}\n",
    "\n",
    "use_basil_inh = True\n",
    "use_soma_inh = False\n",
    "use_apic_inh = False\n",
    "\n",
    "slp_bias_all = np.zeros((3,2))\n",
    "\n",
    "for kk in range(1):\n",
    "    \n",
    "    if kk == 0:\n",
    "        data_apic_up = [np.hstack(data_baseline_noise_apic_up)]\n",
    "        data_soma = [np.hstack(data_baseline_noise_soma)]\n",
    "        peaks_nspk_loc = peaks_nspk_loc_baseline\n",
    "        peaks_bspk_loc = peaks_bspk_loc_baseline\n",
    "        nspk_peak = nspk_peak_baseline\n",
    "    if kk>0:\n",
    "        if use_basil_inh:    \n",
    "            if kk == 1: \n",
    "                data_apic_up = [np.hstack(data_inhibition_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_inhibition_noise_soma)]  \n",
    "                peaks_nspk_loc = peaks_nspk_loc_inhibition\n",
    "                peaks_bspk_loc = peaks_bspk_loc_inhibition\n",
    "                nspk_peak = nspk_peak_inhibition\n",
    "            elif kk == 2: \n",
    "                data_apic_up = [np.hstack(data_cancellation_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_cancellation_noise_soma)]\n",
    "                peaks_nspk_loc = peaks_nspk_loc_cancellation\n",
    "                peaks_bspk_loc = peaks_bspk_loc_cancellation\n",
    "                nspk_peak = nspk_peak_cancellation\n",
    "        elif use_apic_inh:    \n",
    "            if kk == 1: \n",
    "                data_apic_up = [np.hstack(data_apic_inh_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_apic_inh_noise_soma)]  \n",
    "                peaks_nspk_loc = peaks_nspk_loc_apic_inh\n",
    "                peaks_bspk_loc = peaks_bspk_loc_apic_inh\n",
    "                nspk_peak = nspk_peak_apic_inh\n",
    "            elif kk == 2: \n",
    "                data_apic_up = [np.hstack(data_apic_can_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_apic_can_noise_soma)]\n",
    "                peaks_nspk_loc = peaks_nspk_loc_apic_can\n",
    "                peaks_bspk_loc = peaks_bspk_loc_apic_can\n",
    "                nspk_peak = nspk_peak_apic_can\n",
    "        elif use_soma_inh:    \n",
    "            if kk == 1: \n",
    "                data_apic_up = [np.hstack(data_soma_inh_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_soma_inh_noise_soma)]  \n",
    "                peaks_nspk_loc = peaks_nspk_loc_soma_inh\n",
    "                peaks_bspk_loc = peaks_bspk_loc_soma_inh\n",
    "                nspk_peak = nspk_peak_soma_inh\n",
    "            elif kk == 2: \n",
    "                data_apic_up = [np.hstack(data_soma_can_noise_apic_up)]\n",
    "                data_soma = [np.hstack(data_soma_can_noise_soma)]\n",
    "                peaks_nspk_loc = peaks_nspk_loc_soma_can\n",
    "                peaks_bspk_loc = peaks_bspk_loc_soma_can\n",
    "                nspk_peak = nspk_peak_soma_can\n",
    "\n",
    "\n",
    "    if kk == 0 or kk == 1:\n",
    "        min_pk_nsp = -60\n",
    "    elif kk == 2:\n",
    "        min_pk_nsp = -58.5\n",
    "\n",
    "    spk_times_all = turn_trace_to_spk(data_apic_up, min_hight_nspk = min_pk_nsp, min_hight_bspk = -47.5,\\\n",
    "                                      prom_nspk = 1.5, prom_bspk = 15)\n",
    "    peaks_nspk_loc_apic_up = (spk_times_all['peaks_nspk_all'])\n",
    "    nspk_peaks_all_apic_up = np.concatenate([x['peak_heights'] for x in spk_times_all['properties_nspk_all']])\n",
    "\n",
    "    pks_apic = (nspk_peaks_all_apic_up) \n",
    "    peaks_loc_apic = np.concatenate(peaks_nspk_loc_apic_up)\n",
    "    peaks_nspk_loc_apic_up = np.concatenate(peaks_nspk_loc_apic_up)\n",
    "    \n",
    "    fst_dev_apic = np.diff(np.concatenate(data_apic_up),1)\n",
    "    fst_dev_apic = np.insert(fst_dev_apic,0, 0)\n",
    "\n",
    "\n",
    "    mp_nspk_loc_apic = []\n",
    "    idx_list = []\n",
    "    for ii in range(len(peaks_loc_apic)):  \n",
    "        rel_idx = [peaks_loc_apic[ii]-time_before_pk+add_ind+np.where(fst_dev[peaks_loc_apic[ii]-time_before_pk:peaks_loc_apic[ii]]>min_der)[0]]\n",
    "        if len(rel_idx[0])>0:\n",
    "            mp_nspk_loc_apic.append(rel_idx[0][0])\n",
    "        else:\n",
    "            print(ii)\n",
    "            idx_list.append(ii)\n",
    "\n",
    "    pks_apic = np.delete(pks_apic,idx_list)  \n",
    "    peaks_loc_apic = np.delete(peaks_loc_apic,idx_list)  \n",
    "\n",
    "    MP_apic = np.concatenate(data_apic_up)[mp_nspk_loc_apic]\n",
    "    AMP_apic = pks_apic-MP_apic    \n",
    "        \n",
    "    pks_soma = np.concatenate(nspk_peak) \n",
    "    peaks_loc_soma = np.concatenate(peaks_nspk_loc)\n",
    "        \n",
    "    fst_dev_soma = np.diff(np.concatenate(data_soma),1)\n",
    "    fst_dev_soma = np.insert(fst_dev_soma,0, 0)\n",
    "\n",
    "    mp_nspk_loc_soma = np.array([peaks_loc_soma[ii]-time_before_pk+add_ind+np.where(fst_dev_soma[peaks_loc_soma[ii]-time_before_pk:peaks_loc_soma[ii]]>min_der)[0][0] for ii in range(len(peaks_loc_soma))],dtype=int)\n",
    "    MP_soma = np.concatenate(data_soma)[mp_nspk_loc_soma]\n",
    "    AMP_soma = pks_soma-MP_soma    \n",
    "    \n",
    "\n",
    "    idx_all = range(len(MP_soma))\n",
    "    idx_apic_soma = []\n",
    "    idx_apic = []\n",
    "    for jj in range(len(MP_soma)):\n",
    "        apic_f_som = np.concatenate(np.where((peaks_loc_apic > peaks_loc_soma[jj]) & (peaks_loc_apic < peaks_loc_soma[jj]+15)))\n",
    "        if len(apic_f_som)>0:\n",
    "#         if len(np.where(([(a > peaks_loc_soma[jj] and a < peaks_loc_soma[jj]+15) for a in peaks_loc_apic]))[0])>0:\n",
    "            idx_apic_soma.append(jj) \n",
    "            idx_apic.append(apic_f_som[0]) \n",
    "    if len(idx_apic_soma) != len(peaks_loc_apic):\n",
    "        print('mismatch')\n",
    "    \n",
    " \n",
    "    bin_size = 0.1\n",
    "    AMP_som_rel = AMP_soma[idx_apic_soma]\n",
    "    AMP_apic_rel = AMP_apic[idx_apic]\n",
    "    \n",
    "    nbins = np.ceil((np.max(AMP_som_rel)-np.min(AMP_som_rel))/bin_size)\n",
    "    edges = np.linspace(np.min(AMP_som_rel),np.max(AMP_som_rel),num = int(nbins+1))\n",
    "    counts_all, bins_all = np.histogram(np.array(AMP_som_rel,dtype='float'),bins=edges)\n",
    "    inds_amp_som = np.digitize(np.array(AMP_som_rel,dtype='float'), bins=edges)\n",
    "    mean_amp_apic = np.empty((np.size(edges)))\n",
    "    mean_amp_apic[:] = np.NaN\n",
    "    for ii in range(len(edges)):\n",
    "        if np.sum(inds_amp_som == ii) > min_ind:\n",
    "            mean_amp_apic[ii] =  np.mean(AMP_apic_rel[inds_amp_som == ii])                  \n",
    "\n",
    "    X_MP  = (AMP_som_rel.reshape(-1, 1))\n",
    "    model_cell = LinearRegression(fit_intercept=True).fit(X_MP, (AMP_som_rel-AMP_apic_rel).reshape(-1, 1))\n"
   ]
  },
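  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The regression fitted at the end of the previous cell (attenuation, i.e. somatic minus apical amplitude, against somatic amplitude) is stored in `model_cell` but not reported there. A minimal sketch for inspecting it, assuming `model_cell` from that cell is still in scope:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Report the attenuation regression from the previous cell:\n",
    "# slope = change in attenuation (mV) per mV of somatic amplitude.\n",
    "print('attenuation slope: %.3f, intercept: %.3f mV'\n",
    "      % (model_cell.coef_[0][0], model_cell.intercept_[0]))"
   ]
  },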
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {},
   "outputs": [],
   "source": [
    "fig = plt.figure(figsize=(7,3))\n",
    "\n",
    "fs = 6\n",
    "ts = 6\n",
    "plt.rc('xtick',labelsize=fs)\n",
    "plt.rc('ytick',labelsize=fs)\n",
    "\n",
    "plt.subplot(1,2,1)\n",
    "plt.scatter(edges,mean_amp_apic,s=10, color = colr[kk])\n",
    "plt.ylabel('AMP apic (mV)', fontsize = fs)\n",
    "plt.xlabel('amplitude, recorded in soma (mV)', fontsize = fs)\n",
    "\n",
    "plt.subplot(1,2,2)\n",
    "plt.scatter(edges,edges-mean_amp_apic,s=10, color = colr[kk])\n",
    "plt.ylabel('attenuation amount (mV)', fontsize = fs)\n",
    "plt.xlabel('amplitude, recorded in soma (mV)', fontsize = fs)\n",
    "\n",
    "plt.show()\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}