/*--------------------------------------------------------------------------
 Author: Christopher L Buckley

 Institute: Centre for Computational Neuroscience and Robotics
 University of Sussex
 Falmer, Brighton BN1 9QJ, UK

 email to:  c.l.buckley@sussex.ac.uk

 version: 2009-11-02
 --------------------------------------------------------------------------*/

#ifndef NEUROSYNNETWORK_H
#define NEUROSYNNETWORK_H

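/* neuroSynNetwork: a network of NeuroSyn local neurons (LNs) with all-to-all
   synaptic connections, Poisson background inputs and direct current inputs,
   integrated with an adaptive Runge-Kutta (rk65n) scheme. */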
class neuroSynNetwork {
public:
	list<neuron *> neurs;                  // all neurons handed to the NeuronModel
	list<synapse *> syns;                  // all synapses handed to the NeuronModel
	list<neuron *>::iterator niter;
	list<synapse *>::iterator siter;

	NeuroSyn **theLNs;                     // the LN model neurons

	Emptysynapse **theSynapses;            // all-to-all LN->LN synapses
	Emptysynapse **thePoissonSynapses;     // one Poisson->LN synapse per LN

	DCInput **directLNInput;               // direct current input to each LN
	DCInput **directORNInput;
	NeuroSynPoisson **mPoissonInput;       // Poisson background input neurons
	NeuronModel *model;

	rk65n *machine;                        // adaptive Runge-Kutta integrator
	double *x, *xn;                        // current and next state vectors
	double dt, dtx;                        // current and suggested time steps
	int N;                                 // total number of state variables

	Array2D<double> mWeights;              // LN->LN weight matrix (see SetWeights)
	Array1D<double> mPoissonWeights;
	Array1D<double> mSeqArray;             // per-LN initial condition values (see init)
	neuroSynNetwork();
	~neuroSynNetwork();

	void generateNetwork();
	void generateConnect();
	void init();
	void enable();
	Array1D<double> run(double tme, double inputCurr,Array1D<int> inputStart);
	double TransferInv(double in, double *params);
	void SetWeights(Array2D<double> weights, Array1D<double> SeqArray);
	void SetCC(Array1D<double> biasCC);

};

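/* Construct the network: NoLNs LN neurons, each with a direct current input
   and a dedicated Poisson background input, plus all-to-all LN->LN synapses
   (all conductances start at zero); finally the integrator is set up. */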
neuroSynNetwork::neuroSynNetwork() {
	dt = 0.0001;
	dtx = 0.0;
	N = 0;

	directLNInput = new DCInput*[NoLNs];
	mPoissonInput = new NeuroSynPoisson*[NoLNs];
	theLNs = new NeuroSyn*[NoLNs];

	// create the LNs and a direct current input for each of them
	for (int i = 0; i < NoLNs; i++) {
		theLNs[i] = new NeuroSyn(i, PARAMS_LN);
		neurs.push_back(theLNs[i]);
		directLNInput[i] = new DCInput(theLNs[i], 0);
	}

	// create one Poisson background input neuron per LN
	for (int i = 0; i < NoLNs; i++) {
		mPoissonInput[i] = new NeuroSynPoisson(i, PARAMS_POISSON);
		neurs.push_back(mPoissonInput[i]);
	}

	// set up connections: each Poisson input drives its LN, and the LNs are
	// connected all-to-all (no self-connections), with zero initial conductance
	theSynapses = new Emptysynapse*[NoLNs*NoLNs];
	thePoissonSynapses = new Emptysynapse*[NoLNs];
	for (int i = 0; i < NoLNs; i++) {
		thePoissonSynapses[i] = new Emptysynapse(mPoissonInput[i], theLNs[i], 0.0);
		for (int j = 0; j < NoLNs; j++) {
			if (i != j) {
				theSynapses[j*NoLNs+i] = new Emptysynapse(theLNs[i], theLNs[j], 0.0);
			}
		}
	}

	// enable the integrator
	enable();
}

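/* Delete all neurons and synapses registered with the network and free the
   pointer arrays and the integrator state. */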
neuroSynNetwork::~neuroSynNetwork() {
	list<neuron *>::iterator i;
	list<synapse *>::iterator j;
	for (i = neurs.begin(); i != neurs.end(); i++) {
		delete *i;
	}
	// delete any synapses registered with the network
	for (j = syns.begin(); j != syns.end(); j++) {
		delete *j;
	}

	// free the pointer arrays
	delete[] mPoissonInput;
	delete[] theLNs;
	delete[] directLNInput;
	delete[] theSynapses;
	delete[] thePoissonSynapses;

	// free the integrator state allocated in enable()
	delete model;
	delete machine;
	delete[] x;
	delete[] xn;
}

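/* Set up the integrator: build the NeuronModel from the neuron and synapse
   lists (this call is assumed to set N, the total number of state variables),
   allocate the current/next state vectors and construct the adaptive
   Runge-Kutta stepper. */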
void neuroSynNetwork::enable() {
	model = new NeuronModel(&neurs, &syns, N, cerr);
	x = new double[N];
	xn = new double[N];
	machine = new rk65n(N, rk65_MINDT, rk65_eps, rk65_absEps, rk65_relEps);
}



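/* Copy the weight matrix onto the LN->LN synapses: mWeights[i][j] becomes the
   conductance of the synapse from LN i to LN j. SeqArray is stored in
   mSeqArray for use as per-LN initial conditions in init(). */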
void neuroSynNetwork::SetWeights(Array2D<double> weights, Array1D<double> SeqArray)
{
	mWeights = weights;
	mSeqArray = SeqArray;

	for (int i = 0; i < NoLNs; i++) {
		for (int j = 0; j < NoLNs; j++) {
			if (i != j) {
				theSynapses[j * NoLNs + i]->set_gsyn(mWeights[i][j]);
			}
		}
	}
}


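/* Set a constant bias current for each LN via its Poisson input synapse: the
   conductance is chosen so that the equilibrium Poisson activation
   (alpha*tr*Freq/beta) times the driving force at rest (vrest - vrev) delivers
   the requested current biasCC[i]. An alternative that writes the bias
   directly into the cell parameters is left commented out inside. */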
void neuroSynNetwork::SetCC(Array1D<double> biasCC)
{
	double Parameters[NEUROSYN_PNO];

	/*	for (int i = 0; i < NEUROSYN_PNO; i++)
			Parameters[i] = PARAMS_LN[i];

		for (int i = 0; i < NoLNs; i++) {
			Parameters[11] = biasCC[i];
			theLNs[i]->set_p(Parameters);
		}
	*/

	double pgSyn;

	double alpha = PARAMS_POISSON[14];
	double beta = PARAMS_POISSON[15];
	double tr = PARAMS_POISSON[16];
	double Freq = PARAMS_POISSON[18];
	double vrev = PARAMS_POISSON[12];

	// set the weight of each LN's Poisson input so that the mean Poisson-driven
	// current matches the requested bias current biasCC[i]
	for (int i = 0; i < NoLNs; i++)
	{
		pgSyn = biasCC[i]/(alpha*tr*Freq/beta)/(vrestArray[i]-vrev);
		thePoissonSynapses[i]->set_gsyn(pgSyn);
	}

	cout << "The Poisson equilibrium value" << endl << alpha*tr*Freq/beta << endl;
}


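/* (Re)initialise the state vector: the LNs get randomised initial conditions
   with state variable 5 taken from mSeqArray, the Poisson inputs start at
   their equilibrium activation alpha*tr*Freq/beta, and all direct current
   inputs are switched off. */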
void neuroSynNetwork::init() {
	dt = 0.0001;
	dtx = 0.0;
	N = 0;

	double alpha = PARAMS_POISSON[14];
	double beta = PARAMS_POISSON[15];
	double tr = PARAMS_POISSON[16];
	double Freq = PARAMS_POISSON[18];

	double Initvalue[NEUROSYN_IVARNO];

	int counter = 0;
	int counter2 = 0;
	for (niter = neurs.begin(); niter != neurs.end(); niter++) {
		if (counter < NoLNs) {
			// LNs: randomised initial conditions, with variable 5 taken from mSeqArray
			for (int i = 0; i < NEUROSYN_IVARNO; i++)
				Initvalue[i] = NEUROSYN_INIVARS[i]*RG.n()-0.5*RG.n();
			Initvalue[5] = mSeqArray[counter2];
			(*niter)->init(x, Initvalue);
			counter2++;
		} else {
			// Poisson inputs: start at the equilibrium activation alpha*tr*Freq/beta
			Initvalue[0] = (alpha*tr*Freq/beta);
			(*niter)->init(x, Initvalue);
		}
		counter = counter + 1;
	}

	for (int i = 0; i < NoDirectLNInput; i++)
		directLNInput[i]->set_I(0.0);
}

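/* Run the network for `tme` time units. The direct current inputs receive an
   impulse of amplitude inputCurr starting at IMPULSESTART + inputStart[i] and
   lasting IMPULSEDUR. If OutDat is set, spike, membrane (E) and synapse (S)
   traces are written to .dat files. Returns an array of 2*NoLNs values: for
   each LN, its spike count in the 800 time units before IMPULSESTART and in
   the window after THETIME - 800, each divided by 800. */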
Array1D<double> neuroSynNetwork::run(double tme, double inputCurr,Array1D<int> inputStart) {



	Array1D<double> endPoints(2*NoLNs, 0.0);
	vector<double> spike_history;
	stringstream name;
	char thename[80];
	ofstream NSDataN, NSDataS, NSDataM, PSpikes;

	NSDataN.precision(10);
	NSDataS.precision(10);
	NSDataM.precision(10);
	PSpikes.precision(10);

	name.clear();
	int fileInt = int(inputCurr);



	if (plotInc)
		name << "NS" << 100*percentCritical << "DataF" << int(10000000*inputCurr) << ".dat" << ends;
	else
		name << "CondF" << "N" << "fs" << ".dat" << ends;

	name >> thename;
	if (OutDat)
		NSDataN.open(thename);

	name.clear();

	if (plotInc)
		name << "NS" << 100*percentCritical << "DataS" << int(10000000*inputCurr) << ".dat" << ends;
	else
		name << "CondS" << "N" << "fs" << ".dat" << ends;

	name >> thename;
	if (OutDat)
		NSDataS.open(thename);

	name.clear();

	if (plotInc)
		name << "NS" << 100*percentCritical << "DataM" << int(10000000*inputCurr) << ".dat" << ends;
	else
		name << "CondM" << "N" << "fs" << ".dat" << ends;

	name >> thename;
	if (OutDat)
		NSDataM.open(thename);

	name.clear();

	if (!plotInc) {
		name << "PSpikes.dat" << ends;
		name >> thename;
		PSpikes.open(thename);
	}



	double *tmp;

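	// main simulation loop: x[0] holds the current simulation time; each pass
	// advances the network by 0.1 time units, checks for numerical blow-up,
	// detects spikes and accumulates/logs the requested outputs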
	x[0] = 0;
	while (x[0] < tme) {

		// abort if any LN state variable E has become NaN
		for (int i = 0; i < NoLNs; i++) {
			if (isnan(theLNs[i]->E(x))) {
				cerr << "nan encountered!" << endl;
				exit(1);
			}
		}



		double tdt = 0.0;

		// advance the Poisson input neurons over the next 0.1 time units
		for (int i = 0; i < NoLNs; i++)
			mPoissonInput[i]->advance(x, 0.1, bitOne[i]);

		// integrate the rest of the network over the same 0.1 window
		while (fabs(tdt - 0.1) > 1e-9) {

			// switch each direct current input on once its impulse start time
			// (IMPULSESTART + inputStart[i]) has been passed ...
			for (int i = 0; i < NoDirectLNInput; i++) {
				if ((x[0] > IMPULSESTART+inputStart[i]) && (x[0] < (IMPULSESTART+inputStart[i] + 1.0)))
					directLNInput[i]->set_I(inputCurr);
			}

			// ... and off again once the impulse duration IMPULSEDUR has elapsed
			for (int i = 0; i < NoDirectLNInput; i++) {
				if (x[0] > IMPULSESTART+inputStart[i] + IMPULSEDUR
						&& (x[0] < (IMPULSESTART+inputStart[i] + IMPULSEDUR + 1.0)))
					directLNInput[i]->set_I(0.0);
			}

			// take one adaptive integration step, never overshooting the 0.1 window,
			// and let the step size grow by at most a factor of two
			dt = min(dt, 0.1 - tdt);
			dtx = machine->integrate(x, xn, model, dt);
			dtx = min(dtx, 2.0 * dt);

			// swap the current and next state vectors and adopt the suggested step size
			tmp = x;
			x = xn;
			xn = tmp;
			tdt += dt;
			dt = dtx;
		}

		for (int i = 0; i < NoLNs; i++)
			theLNs[i]->spike_detect(x);

		if(!plotInc)
			cout << x[0] << endl;

		if (OutDat) {
			// spike indicator of each LN
			NSDataN << x[0];
			for (int i = 0; i < NoLNs; i++) {
				double spiker = 0;
				if (theLNs[i]->start_spiking)
					spiker = 1;
				NSDataN << " " << spiker;
			}
			NSDataN << endl;

			// S(x) of each Poisson input
			PSpikes << x[0];
			for (int i = 0; i < NoLNs; i++) {
				PSpikes << " " << mPoissonInput[i]->S(x);
			}
			PSpikes << endl;

			// E(x) of each LN
			NSDataM << x[0];
			for (int i = 0; i < NoLNs; i++)
				NSDataM << " " << theLNs[i]->E(x);
			NSDataM << endl;

			// S(x) of each LN
			NSDataS << x[0];
			for (int i = 0; i < NoLNs; i++) {
				NSDataS << " " << theLNs[i]->S(x);
			}
			NSDataS << endl;
		}



		// accumulate LN spike counts over the 800 time units before the
		// stimulus (first half of endPoints) ...
		if (x[0] > (IMPULSESTART - 800) && x[0] < IMPULSESTART) {
			for (int i = 0; i < NoLNs; i++) {
				double spiker = 0;
				if (theLNs[i]->start_spiking)
					spiker = 1.0;
				endPoints[i] = endPoints[i] + spiker;
			}
		}

		// ... and over the window after THETIME - 800 (second half of endPoints)
		if (x[0] > (THETIME - 800)) {
			for (int i = NoLNs; i < 2*NoLNs; i++) {
				double spiker = 0;
				if (theLNs[i-NoLNs]->start_spiking)
					spiker = 1.0;
				endPoints[i] = endPoints[i] + spiker;
			}
		}

	}

	if (OutDat) {
		NSDataN.close();
		NSDataS.close();
		NSDataM.close();
	}

	// normalise the accumulated spike counts by the 800 time unit window length
	for (int i = 0; i < 2*NoLNs; i++)
		endPoints[i] = endPoints[i]/800.0;

	return endPoints;

}

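/* Invert the synaptic transfer function: given a target activation `in`,
   recover the corresponding firing frequency from the synapse parameters
   alpha, beta and tr, then map that frequency back to an input value through
   what appears to be a linear fit F = M*x + C (C and M are globals defined
   elsewhere), returning (F - C)/M. */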
double neuroSynNetwork::TransferInv(double in, double *params) {

	double alpha = params[14];
	double beta = params[15];
	double tr = params[16];

	double lF = beta/log((alpha*exp(beta*tr))/in - alpha/in + 1);

	double out = (lF-C)/M;
	return out;
}
#endif