/* FFNet_Pattern_Activation.c
 *
 * Copyright (C) 1994-2002 David Weenink
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 djmw 19960826
 djmw 20020712 GPL header
*/

#include "Graphics.h"
#include "FFNet_Pattern_Activation.h"

static double func (I, const double p[])
{
	iam (FFNet);
	Minimizer thee = my minimizer;
	long i, j, k;
	double fp = 0;

	/*
		Unpack the minimizer's parameter vector into the selected weights.
	*/
	for (j = 1, k = 1; k <= my nWeights; k++)
	{
		my dw[k] = 0.0;
		if (my wSelected[k]) my w[k] = p[j++];
	}
	/*
		Sum the cost over all patterns and accumulate the derivative.
	*/
	for (i = 1; i <= my nPatterns; i++)
	{
		FFNet_propagate (me, my inputPattern[i], NULL);
		fp += FFNet_computeError (me, my targetActivation[i]);
		FFNet_computeDerivative (me);
		for (k = 1; k <= my nWeights; k++) my dw[k] += my dwi[k];
	}
	thy funcCalls++;
	return fp;
}
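/*
	func returns the cost summed over all patterns and, as a side effect,
	accumulates the derivative with respect to every weight in my dw [],
	so that dfunc_optimized can deliver the gradient without propagating
	the patterns a second time.
*/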

static void dfunc_optimized (I, const double p[], double dp[])
{
	iam (FFNet);
	long j, k;
	(void) p;

	/*
		Pack the accumulated derivatives of the selected weights into dp.
	*/
	for (j = 1, k = 1; k <= my nWeights; k++)
	{
		if (my wSelected[k]) dp[j++] = my dw[k];
	}
}
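/*
	Packing convention shared by func and dfunc_optimized: the minimizer
	works on a vector of length my dimension that holds only the selected
	weights, in the order in which they occur in my w [1..nWeights]; my
	dimension is therefore the number of selected weights.
*/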

static int _FFNet_Pattern_Activation_learn (FFNet me, Pattern pattern, 
	Activation activation, long maxNumOfEpochs, double tolerance, 
	Any parameters, int costFunctionType, int reset)
{
	int status;
	if (my nInputs != pattern -> nx) return Melder_error
		("FFNet_Pattern_Activation_learn: "
		"the number of columns in the Pattern must equal the number of inputs of the neural net.");
	if (my nOutputs != activation -> nx) return Melder_error
		("FFNet_Pattern_Activation_learn: "
		"the number of columns in the Activation must equal the number of outputs of the neural net.");
	if (pattern -> ny != activation -> ny) return Melder_error
		("FFNet_Pattern_Activation_learn: "
		"the number of rows in the Pattern must equal the number of rows in the Activation.");
		
    Minimizer_setParameters (my minimizer, parameters);
	
	/*
		Link to the data to be learned (borrowed pointers, no copies).
	*/
	
    my nPatterns = pattern -> ny;
    my inputPattern = pattern -> z;
    my targetActivation = activation -> z;
    FFNet_setCostFunction (me, costFunctionType);
	
	if (reset)
	{
		long i, k = 1;
		double *wbuf = NUMdvector (1, my dimension);

		if (wbuf == NULL) return 0;

		/*
			Gather the currently selected weights as the minimizer's
			starting point.
		*/
		for (i = 1; i <= my nWeights; i++)
		{
			if (my wSelected[i]) wbuf[k++] = my w[i];
		}
		Minimizer_reset (my minimizer, wbuf);
		NUMdvector_free (wbuf, 1);
	}
    status = Minimizer_minimize (my minimizer, maxNumOfEpochs, tolerance, 1);

	/*
		Unlink: this FFNet does not own the Pattern and Activation data.
	*/
	
    my nPatterns = 0; 
	my inputPattern = NULL; 
	my targetActivation = NULL;
	
    return status;
}
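/*
	The four public learn functions below differ only in the minimizer
	they install: Polak-Ribiere conjugate gradients (PR), Fletcher-Powell
	(FP), steepest descent (SD), or Van der Smagt (SM). A minimal usage
	sketch with hypothetical objects (the costFunctionType value 1 is an
	assumption here; see FFNet_setCostFunction for the accepted values):

		FFNet net = ...;
		Pattern inputs = ...;
		Activation targets = ...;
		if (! FFNet_Pattern_Activation_learnPR (net, inputs, targets,
			500, 1e-7, NULL, 1)) return 0;
*/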
  
int FFNet_Pattern_Activation_learnPR (FFNet me, Pattern p, Activation a, 
	long maxNumOfEpochs, double tolerance, Any parameters, int costFunctionType)
{
    int resetMinimizer = 0;
	
	/*
		Discard the existing minimizer if it is of a different type.
	*/
	if (my minimizer != NULL &&
		! Thing_member (my minimizer, classPolakRibiereMinimizer))
	{
		forget (my minimizer);
		resetMinimizer = 1;
	}

	/*
		Create the minimizer if it doesn't exist.
	*/
	
    if (my minimizer == NULL)
    {
    	resetMinimizer = 1;
		my minimizer = PolakRibiereMinimizer_create (my dimension, me, func,
			dfunc_optimized);
    	if (my minimizer == NULL) return 0;
    }
    return _FFNet_Pattern_Activation_learn (me, p, a, maxNumOfEpochs,
		tolerance, parameters, costFunctionType, resetMinimizer);
}

int FFNet_Pattern_Activation_learnFP (FFNet me, Pattern p, Activation a, 
	long maxNumOfEpochs, double tolerance, Any parameters, int costFunctionType)
{
    int resetMinimizer = 0;
	/*
		Discard the existing minimizer if it is of a different type.
	*/
	if (my minimizer != NULL &&
		! Thing_member (my minimizer, classFletcherPowellMinimizer))
	{
		forget (my minimizer);
		resetMinimizer = 1;
	}
	/*
		Create the minimizer if it doesn't exist.
	*/
    if (my minimizer == NULL)
    {
    	resetMinimizer = 1;
		my minimizer = FletcherPowellMinimizer_create (my dimension, me, func,
			dfunc_optimized);
    	if (my minimizer == NULL) return 0;
    }
    return _FFNet_Pattern_Activation_learn (me, p, a, maxNumOfEpochs,
		tolerance, parameters, costFunctionType, resetMinimizer);
}

int FFNet_Pattern_Activation_learnSD (FFNet me, Pattern p, Activation a, 
	long maxNumOfEpochs, double tolerance, Any parameters, int costFunctionType)
{
    int resetMinimizer = 0;
	/*
		Discard the existing minimizer if it is of a different type.
	*/
	if (my minimizer != NULL &&
		! Thing_member (my minimizer, classSteepestDescentMinimizer))
	{
		forget (my minimizer);
		resetMinimizer = 1;
	}
	/* Create the minimizer if it doesn't exist. */
    if (my minimizer == NULL)
    {
    	resetMinimizer = 1;
		my minimizer = SteepestDescentMinimizer_create (my dimension, me, func,
			dfunc_optimized);
    	if (my minimizer == NULL) return 0;
    }
    return _FFNet_Pattern_Activation_learn (me, p, a, maxNumOfEpochs,
		tolerance, parameters, costFunctionType, resetMinimizer);
}

int FFNet_Pattern_Activation_learnSM (FFNet me, Pattern p, Activation a, 
	long maxNumOfEpochs, double tolerance, Any parameters, int costFunctionType)
{
    int resetMinimizer = 0;
	/*
		Discard the existing minimizer if it is of a different type.
	*/
	if (my minimizer != NULL &&
		! Thing_member (my minimizer, classVDSmagtMinimizer))
	{
		forget (my minimizer);
		resetMinimizer = 1;
	}
	/* Create the minimizer if it doesn't exist. */
	if (my minimizer == NULL)
    {
    	resetMinimizer = 1;
		my minimizer = VDSmagtMinimizer_create (my dimension, me, func, 
			dfunc_optimized);
    	if (my minimizer == NULL) return 0;
    }
    return _FFNet_Pattern_Activation_learn (me, p, a, maxNumOfEpochs,
		tolerance, parameters, costFunctionType, resetMinimizer);
}

Activation FFNet_Pattern_to_Activation (FFNet me, Pattern p, long layer)
{
    Activation thee;
    long i, nPatterns = p -> ny;
	
	if (layer < 1 || layer > my nLayers) layer = my nLayers;	/* Default to the output layer. */
	if (my nInputs != p -> nx) return Melder_errorp
		("FFNet_Pattern_to_Activation: "
		"the number of columns in the Pattern must equal the number of inputs of the neural net.");
		
	thee = Activation_create (nPatterns, my nUnitsInLayer[layer]);	
    if (thee == NULL) return NULL;
	
    for (i = 1; i <= nPatterns; i++)
    {
    	FFNet_propagateToLayer (me, p -> z[i], thy z[i], layer);
    }
    return thee;
}
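/*
	A minimal usage sketch for FFNet_Pattern_to_Activation (hypothetical
	objects; a 'layer' outside 1..nLayers selects the output layer). The
	winner-take-all loop finds the most active output unit per pattern:

		Activation out = FFNet_Pattern_to_Activation (net, inputs, 0);
		long i, j, best;
		if (out == NULL) return 0;
		for (i = 1; i <= out -> ny; i++)
		{
			for (best = 1, j = 2; j <= out -> nx; j++)
				if (out -> z[i][j] > out -> z[i][best]) best = j;
		}
		forget (out);
*/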

/* End of file FFNet_Pattern_Activation.c */
