/* manual_FFNet.c
 *
 * Copyright (C) 1994-2003 David Weenink
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 djmw 20020408 GPL
 djmw 20030317 Latest modification
*/

#include "ManPagesM.h"

void manual_FFNet_init (ManPages me);
/*
 * Registers all FFNet-related manual pages with the ManPages collection `me`.
 * Each MAN_BEGIN .. MAN_END section defines one hyperlinked manual page
 * (title, author, date) whose body is built from the documentation macros
 * (INTRO, NORMAL, ENTRY, TAG, DEFINITION, LIST_ITEM).
 */
void manual_FFNet_init (ManPages me)
{

MAN_BEGIN ("FFNet", "djmw", 19961015)
INTRO ("One of the @@types of objects@ in P\\s{RAAT}.")
NORMAL ("An object of type #FFNet models a Feedforward Neural Net. A feedforward "
	"neural net can %learn associations between its %input and its %output. "
	"The basic unit of a neural net is the %node. Several nodes are grouped together "
	"in a %layer. Each unit in a layer is connected to all units in the previous layer. "
	"A feedforward neural net contains one, two or three layers. "
	/* Fixed: missing space between sentences across the string concatenation. */
	"All layers not directly connected to outputs are %%hidden layers%. "
	"The minimal net has one layer with 1 unit and no hidden layers.")
ENTRY ("FFNet commands")
NORMAL ("Creation:")
LIST_ITEM ("\\bu @@Pattern & Categories: To Feedforward Net...@")
LIST_ITEM ("\\bu @@New: Create Feedforward Net...@")
NORMAL ("Drawing:")
LIST_ITEM ("\\bu @@FFNet: Draw topology@")
LIST_ITEM ("\\bu @@FFNet: Draw weights to layer...@")
LIST_ITEM ("\\bu @@FFNet: Draw cost history...@")
NORMAL ("Modification:")
LIST_ITEM ("\\bu @@FFNet: Reset...@")
LIST_ITEM ("\\bu ##FFNet: Select biases...#")
LIST_ITEM ("\\bu ##FFNet: Select all weights#")
NORMAL ("Classification:")
LIST_ITEM ("\\bu @@FFNet & Pattern: To Categories...@")
NORMAL ("Analysis:")
LIST_ITEM ("\\bu ##FFNet & Pattern: To Activation...#")
NORMAL ("Learning:")
LIST_ITEM ("\\bu @@Pattern & FFNet & Categories: Learn...@")
LIST_ITEM ("\\bu @@Pattern & FFNet & Categories: Learn slow...@")
ENTRY ("How to get started")
NORMAL ("You can create an example set of objects with the ##Create iris example...# button "
	"which you can find under the ##Neural nets# option in the #New menu. "
	"In the ##List of Objects# will appear three new objects: a #FFNet, a #Categories and "
	"a #Pattern, all named %iris.")
NORMAL ("The #Pattern will contain the @@iris data set@ (150 rows, 4 columns, all measurement "
	"values divided by 10). In the #Categories object the three species %setosa, "
	"%versicolor, and %virginica were categorized as #1, #2 and #3, respectively. "
	"The #FFNet has 4 input units and 3 output units. When you have entered a positive number in "
	"the form it will have this number of units in a %%hidden layer%.")
/* Fixed: garbled grammar ("is the #FFNet to learn"). */
NORMAL ("The first thing you want to do is to let the #FFNet learn the association between #Pattern and "
	"#Categories. Select all three iris objects and choose ##Learn (SM)...#")
NORMAL ("When you are curious how well the net learned the association, you select the "
	"#FFNet and the #Pattern and choose ##To Categories (classify)...#. A new #Categories object appears "
	"in the ##List of Objects# also with the name %iris. Select the two #Categories and "
	"choose #Difference to get a text window with the percentage correct identification "
	/* Fixed: double space introduced by the leading space after "identification ". */
	"(if you want more specific information choose ##To Confusion# and the @Confusion matrix "
	"will be generated).")
NORMAL ("With #Pattern and #Categories selected, you can for example create a new #FFNet. You also "
	"can create a #Discriminant which can immediately be used for classification "
	/* Fixed: double space introduced by the leading space after "classification ". */
	"(with a #Pattern).")
MAN_END

MAN_BEGIN ("FFNet: Draw topology", "djmw", 19970218)
INTRO ("You can choose this command after selecting 1 or more @FFNet's.")
ENTRY ("Behaviour")
NORMAL ("Draws all nodes and all connections of a feedforward neural net.")
MAN_END

MAN_BEGIN ("FFNet: Draw weights to layer...", "djmw", 19970218)
INTRO ("You can choose this command after selecting 1 or more @FFNet's.")
ENTRY ("Arguments")
TAG ("%%Connections to layer% (\\>_1)")
DEFINITION ("values of all the weights that connect to this layer will be drawn.")
TAG ("%Scale")
DEFINITION ("determines the scaling of the weights.")
TAG ("%Garnish")
DEFINITION ("determines whether additional information is drawn.")
ENTRY ("Behaviour")
NORMAL ("The weights are arranged in a matrix with elements %W__%ij_, where %i runs from 1 to the %%number "
	"of nodes in the selected layer% and %j runs from 1 to the (%%number of units in the previous "
	/* Fixed: double space introduced by the leading space before "layer%". */
	"layer% +1). The values of these weights are shown as squares, where the area of the square is proportional "
	"to its value. Negative values are shown in black.")
NORMAL ("The scaling is done row-wise, column-wise or matrix-wise, according to the selected algorithm.")
MAN_END

MAN_BEGIN ("FFNet: Draw cost history...", "djmw", 19970218)
INTRO ("You can choose this command after selecting 1 or more @FFNet's.")
ENTRY ("Arguments")
TAG ("%%From iteration, To iteration")
DEFINITION ("determine the horizontal scale of the plot.")
TAG ("%%From cost, To cost")
DEFINITION ("determine the vertical scale of the plot.")
TAG ("%Garnish")
DEFINITION ("determines whether a box and axis labels are drawn.")
ENTRY ("Behaviour")
NORMAL ("Draws the cost of learning versus iteration number.")
MAN_END

MAN_BEGIN ("FFNet: Reset...", "djmw", 19970218)
INTRO ("You can choose this command after selecting 1 or more @FFNet's.")
ENTRY ("Arguments")
TAG ("%%Range for weights")
DEFINITION ("gives range around zero.")
ENTRY ("Behaviour")
NORMAL ("All (selected) weights are reset to random numbers uniformly drawn from the range selected. "
	"This command also clears the cost history.")
ENTRY ("WARNING")
NORMAL ("This command destroys all previous learning.")
MAN_END

MAN_BEGIN ("New: Create Feedforward Net...", "djmw", 19960918)
INTRO ("Create a new feedforward neural net of type @FFNet.")
ENTRY ("Arguments")
TAG ("%%Number of input units%")
DEFINITION ("the dimension of the input, positive (default 1).")
TAG ("%%Number of output units (\\>_ 1)%")
DEFINITION ("the number of classes.")
TAG ("%%Number of units in 1-st hidden layer (\\>_ 0)%")
DEFINITION ("when positive a neural net with at least one hidden layer will be created.")
TAG ("%%Number of units in 2-nd hidden layer (\\>_ 0)%")
DEFINITION ("when positive and %%Number of units in 1-st hidden layer% is positive a neural "
	"net with two hidden layers will be created. When only one of %%Number of units in 1-st hidden layer% "
	"and %%Number of units in 2-nd hidden layer% is positive a net with one hidden layer "
	"will be created.")
TAG ("%%Outputs are linear%")
DEFINITION ("when selected there will be no non-linearity in the output units. "
	"The default is a %sigmoid non-linearity.")
MAN_END

MAN_BEGIN ("iris data set", "djmw", 19961015)
NORMAL ("A data set with 150 random samples of flowers from the iris species %setosa, "
	"%versicolor, and %virginica. From each species there are 50 observations for "
	"sepal length, sepal width, petal length, and petal width in cm. This dataset was "
	"used by @@Fisher (1936)@ in his initiation of the linear-discriminant-function technique.")
MAN_END

MAN_BEGIN ("FFNet: Pattern", "djmw", 19960918)
INTRO ("A @Pattern is a @Matrix in which each row forms one input vector (pattern) for the neural net.")
NORMAL ("The number of columns is the dimensionality of the input. "
"The number of rows is the number of patterns.")
MAN_END

MAN_BEGIN ("FFNet: Categories", "djmw", 19960918)
INTRO ("The categories for training a neural net with a @Pattern. ")
ENTRY ("Preconditions")
NORMAL ("The number of categories in a @Categories must equal the number of rows in #Pattern.")
MAN_END

MAN_BEGIN ("FFNet: Activation", "djmw", 19960918)
INTRO ("A @Matrix whose elements must be >= 0 and <= 1. "
/* Fixed: missing spaces between the concatenated sentences. */
"Classification: the response of a particular layer in a neural net to a @Pattern. "
"Learning: the desired response of the output layer in a neural net to a @Pattern.")
MAN_END

MAN_BEGIN ("FFNet: Principal components", "djmw", 19960918)
INTRO ("When you select @FFNet and @Eigen the decision planes of layer 1 are drawn in the PC-plane.\n")
MAN_END

MAN_BEGIN ("FFNet & Pattern: To Categories...", "djmw", 19960918)
INTRO ("To classify (label) you have to select an @FFNet and a @Pattern object and choose ##To Categories...#.")
ENTRY ("Preconditions")
NORMAL ("The number of columns in a #Pattern must equal the number of input units of #FFNet.")
MAN_END

MAN_BEGIN ("Pattern & Categories: To Feedforward Net...", "djmw", 19960918)
INTRO ("You can choose this command after selecting one @Pattern and one @Categories. "
	"For the specification of the neural net, the number of inputs of the net is taken to be "
	"equal to the number of columns in the #Pattern object. The number of outputs of the net "
	"will be equal to the number of different categories in the #Categories object. "
	"You can specify the number of hidden nodes in maximally two hidden layers. "
	"You can also specify whether the outputs nodes have a non-linearity or not. Standard neural "
	"nets do have a non-linearity.")
MAN_END

MAN_BEGIN ("Pattern & FFNet & Categories: Learn slow...", "djmw", 19960918)
INTRO ("To learn an association you have to select a @FFNet, a @Pattern and a @Categories object.")
/* Fixed: the page had two separate "Preconditions" headings and a stray
   leading space in " Algorithm"; the precondition items are now merged
   under a single heading. */
ENTRY ("Preconditions")
LIST_ITEM ("The number of columns in a #Pattern must equal the number of input units of #FFNet.")
LIST_ITEM ("The number of rows in a #Pattern must equal the number of categories in a #Categories.")
LIST_ITEM ("The number of unique categories in a #Categories must equal the number of output units in #FFNet.")
ENTRY ("Algorithm")
NORMAL ("Steepest descent")
MAN_END

MAN_BEGIN ("Pattern & FFNet & Categories: Learn...", "djmw", 19960918)
INTRO ("You can choose this command after selecting one @Pattern, one @Categories and one @FFNet.")
ENTRY ("Algorithm")
NORMAL ("The minimization procedure is a variant of conjugate gradient minimization, "
	"see for example @@Press et al. (1992)@ chapter 10.")
ENTRY ("Arguments")
TAG ("%%Maximum number of epochs%")
DEFINITION ("the maximum number of times that the complete #Pattern dataset will be presented to the neural net.")
TAG ("%%Tolerance of minimizer%")
DEFINITION ("when the difference in costs between two successive learning cycles is "
"smaller than this value, the minimisation process will be stopped.")
NORMAL ("%%Cost function%")
LIST_ITEM ("minimum squared error:")
/* Fixed: "%d__%k_" lacked its "%" italic marker, inconsistent with its
   definition two lines below and with the cross-entropy formula. */
LIST_ITEM ("  %cost = \\su__%allPatterns_ \\su__%allOutputs_ (%o__%k_ - %d__%k_)^2, where")
LIST_ITEM ("      %o__%k_ : actual output of node %k")
LIST_ITEM ("      %d__%k_ : desired output of node %k")
LIST_ITEM ("minimum cross entropy:")
LIST_ITEM ("  %cost = - \\su__%allPatterns_ \\su__%allOutputs_ (%d__%k_ \\.c ln %o__%k_ + (1-%d__%k_) \\.c ln (1-%o__%k_))")
MAN_END

MAN_BEGIN ("Fisher (1936)", "djmw", 19980114)
NORMAL ("R.A. Fisher (1936), \"The use of multiple measurements in taxonomic "
	"problems\", %%Annals of Eugenics% #7, 179-188.")
MAN_END

}

/* End of file manual_FFNet.c */
