import os
import subprocess
import shutil
import time
import neuron as nrn
import random
import numpy
import struct
from deap import base, creator, tools
from score_functions import chi_square_normal
from NeuroGPUFromPkl import run_params_with_pkl
# Parameter bounds (pmin/pmax) and target parameter values; populated by
# init_nrngpu() below.
pmin = None
pmax = None
ptarget = None
NPARAMS = 3      # number of free parameters per individual
POP_SIZE = 256   # GA population size
model_dir = 'C:/BBP_new/'
data_dir = 'C:/BBP_new/Data/'
param_file = 'C:/BBP_new/params/gen.csv'     # parameter matrix written for each NeuroGPU run
run_dir = 'C:/pyNeuroGPU_win2/'
vs_fn = run_dir + 'Data/VHotP.dat'           # binary voltage output produced by NeuroGPU
params_table = data_dir + '../params/opt_table.csv'  # rows: lower bounds, upper bounds, target values
# target voltage data; the path below is replaced by the trace itself in init_nrngpu()
orig_volts = data_dir + '../volts/orig_step_cADpyr232_L5_TTPC1_0fb1ca4724[0].soma[0].dat'
def nrnMread(fileName):
    """Read a NeuroGPU binary voltage file: a header of two 4-byte ints
    (nparam, typeFlg) followed by the voltage samples as doubles."""
    with open(fileName, "rb") as f:
        nparam = struct.unpack('i', f.read(4))[0]
        typeFlg = struct.unpack('i', f.read(4))[0]
        return numpy.fromfile(f, numpy.double)



def getOrig(fileName):
    """Run the hoc readMatrix/runMatrix pipeline on fileName and return the
    resulting matOut vector as a Python list (used as the optimization target)."""
    nrn.h.paramsFile = fileName
    nrn.h.psize = 1
    nrn.h("pmat = new Matrix(psize, nparams)")
    nrn.h("readMatrix(paramsFile, pmat)")
    nrn.h("runMatrix(pmat,stims)")
    nrn.h("print matOut")
    nrn.h("matOut = matOut.to_vector()")
    volts = nrn.h.matOut.to_python()
    return volts



def init_nrngpu():
    """Load the NEURON model, the parameter bounds table and the target trace."""
    global pmin
    global pmax
    global ptarget
    global orig_volts
    modelFile = "./runModel.hoc"
    nrn.h.load_file(modelFile)
    # opt_table.csv: row 0 = lower bounds, row 1 = upper bounds, row 2 = target values
    data = numpy.genfromtxt(params_table, delimiter=',', names=True)
    pmin = data[0]
    pmax = data[1]
    ptarget = data[2]
    # replace the file path with the actual target voltage trace
    orig_volts = getOrig(orig_volts)

init_nrngpu()



hocmodel_name = data_dir + os.path.basename(nrn.h.modelFile)[:-3] + 'pkl'  # pickled model consumed by run_params_with_pkl
creator.create("FitnessMax", base.Fitness, weights=(-1.0,))
creator.create("Individual", list, fitness=creator.FitnessMax)

toolbox = base.Toolbox()
toolbox.register("attr_float", random.random)
toolbox.register("individual", tools.initRepeat, creator.Individual,
                 toolbox.attr_float, n=NPARAMS)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
toolbox.register("mate", tools.cxTwoPoint)
toolbox.register("mutate", tools.mutGaussian, mu=0, sigma=1, indpb=0.2)
toolbox.register("select", tools.selTournament, tournsize=3)

def evaluate(population):
    """Score a whole population in one NeuroGPU run: rescale the normalized
    genes to physical parameter values, write them to disk, launch the GPU
    simulation and compare every voltage trace to the target with a
    chi-square measure (one score per individual)."""
    global orig_volts
    param_mat = numpy.array(population)
    # map each gene from [0, 1] to its physical range [pmin, pmax]
    for pind in range(NPARAMS):
        min_p = pmin[pind]
        max_p = pmax[pind]
        param_mat[:, pind] = param_mat[:, pind] * (max_p - min_p) + min_p

    if os.path.exists(vs_fn):
        os.remove(vs_fn)
    numpy.savetxt(param_file, param_mat, delimiter=' ')
    os.chdir(model_dir)
    curr_psize = len(population)
    run_params_with_pkl(hocmodel_name, param_file, curr_psize)
    shutil.move(data_dir + 'AllParams.csv', run_dir + 'Data/AllParams.csv')
    time.sleep(1)
    os.chdir(run_dir + 'x64/')
    subprocess.call('NeuroGPU6.exe')
    # wait until the GPU run has produced its voltage output file
    while not os.path.exists(vs_fn):
        time.sleep(0.1)
    volts = nrnMread(vs_fn)
    Nt = int(len(volts) / curr_psize)
    all_volts = numpy.reshape(volts, [curr_psize, Nt])
    scores = [None] * len(all_volts)
    for curr_ind in range(len(all_volts)):
        scores[curr_ind] = (chi_square_normal(orig_volts, all_volts[curr_ind]),)
    return scores
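

# A minimal helper sketch (not used by evaluate(), which rescales the whole
# population as a matrix): maps a single normalized individual to physical
# parameter values with the same min-max rule, e.g. for inspecting the
# hall-of-fame candidate after a run. The name scale_individual is an
# illustrative addition, not part of the original pipeline.
def scale_individual(individual):
    """Map genes in [0, 1] to physical values in [pmin[i], pmax[i]]."""
    return [g * (pmax[i] - pmin[i]) + pmin[i] for i, g in enumerate(individual)]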


toolbox.register("evaluate", evaluate)


def getVolts(params):
    """Run a single parameter set through NEURON (via the hoc runMatrix helper)
    and return its voltage trace as a Python list."""
    nrn.h.transvec.from_python(params)
    nrn.h.psize = 1
    nrn.h("initialize(v_init)")
    nrn.h("pmat = new Matrix(1,nparams)")
    nrn.h("pmat.setrow(0,transvec)")
    nrn.h("runMatrix(pmat, stims)")
    return nrn.h.matOut.getrow(0).to_python()
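

# A validation sketch built on getVolts() (an illustrative addition, not part
# of the original workflow): rescale a normalized individual, rerun it through
# NEURON and score the resulting trace against the target with the same
# chi-square measure used in evaluate(). Typical use after a run:
#   pop, log, hof = main(); print(compare_best(hof[0]))
def compare_best(best_individual):
    params = scale_individual(best_individual)
    volts = getVolts(params)
    return chi_square_normal(orig_volts, volts)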


def varOr(population, toolbox, lambda_, cxpb, mutpb):
    """Part of an evolutionary algorithm applying only the variation part
    (crossover, mutation **or** reproduction). The modified individuals have
    their fitness invalidated. The individuals are cloned so returned
    population is independent of the input population.

    :param population: A list of individuals to vary.
    :param toolbox: A :class:`~deap.base.Toolbox` that contains the evolution
                    operators.
    :param lambda\_: The number of children to produce
    :param cxpb: The probability of mating two individuals.
    :param mutpb: The probability of mutating an individual.
    :returns: The final population
    :returns: A class:`~deap.tools.Logbook` with the statistics of the
              evolution

    The variation goes as follow. On each of the *lambda_* iteration, it
    selects one of the three operations; crossover, mutation or reproduction.
    In the case of a crossover, two individuals are selected at random from
    the parental population :math:`P_\mathrm{p}`, those individuals are cloned
    using the :meth:`toolbox.clone` method and then mated using the
    :meth:`toolbox.mate` method. Only the first child is appended to the
    offspring population :math:`P_\mathrm{o}`, the second child is discarded.
    In the case of a mutation, one individual is selected at random from
    :math:`P_\mathrm{p}`, it is cloned and then mutated using using the
    :meth:`toolbox.mutate` method. The resulting mutant is appended to
    :math:`P_\mathrm{o}`. In the case of a reproduction, one individual is
    selected at random from :math:`P_\mathrm{p}`, cloned and appended to
    :math:`P_\mathrm{o}`.

    This variation is named *Or* beceause an offspring will never result from
    both operations crossover and mutation. The sum of both probabilities
    shall be in :math:`[0, 1]`, the reproduction probability is
    1 - *cxpb* - *mutpb*.
    """
    assert (cxpb + mutpb) <= 1.0, (
        "The sum of the crossover and mutation probabilities must be smaller "
        "or equal to 1.0.")

    offspring = []
    for _ in range(lambda_):
        op_choice = random.random()
        if op_choice < cxpb:            # Apply crossover
            ind1, ind2 = map(toolbox.clone, random.sample(population, 2))
            ind1, ind2 = toolbox.mate(ind1, ind2)
            del ind1.fitness.values
            offspring.append(ind1)
        elif op_choice < cxpb + mutpb:  # Apply mutation
            ind = toolbox.clone(random.choice(population))
            ind, = toolbox.mutate(ind)
            del ind.fitness.values
            offspring.append(ind)
        else:                           # Apply reproduction
            offspring.append(random.choice(population))

    return offspring


def eaMuPlusLambda(population, toolbox, mu, lambda_, cxpb, mutpb, ngen,
                   stats=None, halloffame=None, verbose=__debug__):
    """This is the :math:`(\mu + \lambda)` evolutionary algorithm.

    :param population: A list of individuals.
    :param toolbox: A :class:`~deap.base.Toolbox` that contains the evolution
                    operators.
    :param mu: The number of individuals to select for the next generation.
    :param lambda\_: The number of children to produce at each generation.
    :param cxpb: The probability that an offspring is produced by crossover.
    :param mutpb: The probability that an offspring is produced by mutation.
    :param ngen: The number of generation.
    :param stats: A :class:`~deap.tools.Statistics` object that is updated
                  inplace, optional.
    :param halloffame: A :class:`~deap.tools.HallOfFame` object that will
                       contain the best individuals, optional.
    :param verbose: Whether or not to log the statistics.
    :returns: The final population
    :returns: A class:`~deap.tools.Logbook` with the statistics of the
              evolution.

    The algorithm takes in a population and evolves it in place using the
    :func:`varOr` function. It returns the optimized population and a
    :class:`~deap.tools.Logbook` with the statistics of the evolution. The
    logbook will contain the generation number, the number of evalutions for
    each generation and the statistics if a :class:`~deap.tools.Statistics` is
    given as argument. The *cxpb* and *mutpb* arguments are passed to the
    :func:`varOr` function. The pseudocode goes as follow ::

        evaluate(population)
        for g in range(ngen):
            offspring = varOr(population, toolbox, lambda_, cxpb, mutpb)
            evaluate(offspring)
            population = select(population + offspring, mu)

    First, the individuals having an invalid fitness are evaluated. Second,
    the evolutionary loop begins by producing *lambda_* offspring from the
    population, the offspring are generated by the :func:`varOr` function. The
    offspring are then evaluated and the next generation population is
    selected from both the offspring **and** the population. Finally, when
    *ngen* generations are done, the algorithm returns a tuple with the final
    population and a :class:`~deap.tools.Logbook` of the evolution.

    This function expects :meth:`toolbox.mate`, :meth:`toolbox.mutate`,
    :meth:`toolbox.select` and :meth:`toolbox.evaluate` aliases to be
    registered in the toolbox. This algorithm uses the :func:`varOr`
    variation.
    """
    logbook = tools.Logbook()
    logbook.header = ['gen', 'nevals'] + (stats.fields if stats else [])

    # Evaluate the individuals with an invalid fitness
    invalid_ind = [ind for ind in population if not ind.fitness.valid]
    # the registered evaluate() scores the whole batch in one NeuroGPU call,
    # so it is applied to the list of individuals directly
    fitnesses = toolbox.evaluate(invalid_ind)

    for ind, fit in zip(invalid_ind, fitnesses):
        ind.fitness.values = fit

    if halloffame is not None:
        halloffame.update(population)

    record = stats.compile(population) if stats is not None else {}
    logbook.record(gen=0, nevals=len(invalid_ind), **record)
    if verbose:
        print(logbook.stream)

    # Begin the generational process
    for gen in range(1, ngen + 1):
        # Vary the population
        offspring = varOr(population, toolbox, lambda_, cxpb, mutpb)

        # Evaluate the individuals with an invalid fitness
        invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
        fitnesses = toolbox.evaluate(invalid_ind)
        for ind, fit in zip(invalid_ind, fitnesses):
            ind.fitness.values = fit

        # Update the hall of fame with the generated individuals
        if halloffame is not None:
            halloffame.update(offspring)

        # Select the next generation population
        population[:] = toolbox.select(population + offspring, mu)

        # Update the statistics with the new population
        record = stats.compile(population) if stats is not None else {}
        logbook.record(gen=gen, nevals=len(invalid_ind), **record)
        if verbose:
            print(logbook.stream)

    return population, logbook

def main():
    random.seed(64)
    pop = toolbox.population(n=POP_SIZE)
    hof = tools.HallOfFame(1)
    stats = tools.Statistics(lambda ind: ind.fitness.values)
    stats.register("avg", numpy.mean)
    stats.register("std", numpy.std)
    stats.register("min", numpy.min)
    stats.register("max", numpy.max)

    pop, log = eaMuPlusLambda(pop, toolbox, mu=2, lambda_=290, cxpb=0.5,
                              mutpb=0.2, ngen=20, stats=stats,
                              halloffame=hof, verbose=True)

    print(log)
    print(pop)

    return pop, log, hof


if __name__ == "__main__":
    main()