/* MULTI-LAYER PERCEPTRON PROGRAM          SOURCE FILE mlp.c        18MAR94
   COMMENTARY IN mlpc.htm
   Author: Robert John Morton YE57226C     Tab = 3 spaces     November 1993 */

#include <stdlib.h>  //for the random number generators rand() & srand()
#include <stdio.h>   //for standard file i/o streams
#include <io.h>      //for filelength() definition
#include <malloc.h>  //dynamic allocation of input, output & weights arrays
#include <graph.h>   //for _clearscreen() & _settextposition()
#include <time.h>    //for inter-pattern processing delay
#include <conio.h>   //for kbhit() & getch() keyboard polling

#define NL 4         //number of network layers
#define NAL 3        //number of active network layers
#define R 32767      //maximum value of a bipolar 16-bit integer (short)
#define FALSE 0
#define TRUE !FALSE

int delay,            //seconds delay between processing consecutive patterns
    NR,               //total number of patterns in the input file
    Training = FALSE, //TRUE = the program is in training mode
    BeepFlag = FALSE, //TRUE = beep when the program has finished
    SeedVal  = 1;     //default value for the random weights generator seeder

short N[NL],          //neurons per layer
      *L[NL],         //access to the layer outputs
      **W[NL],        //access to all weights
      *E[NL],         //pointers to each layer's errors array
      **M[NL],        //pointers to the pointer arrays that access each
                      //layer's delta-weights (momentum terms)
      *pt,            //pointer to the target outputs for training mode
      SigTab[1025];   //sigmoid function's look-up table

FILE *Wfh,            //pointer to the weights file's data structure
     *Ifh,            //pointer to the input file's data structure
     *Ofh;            //pointer to the output file's data structure

/* SAMPLE COMMAND LINE:
   rob@neddy:~$ mlp robot.wts robot.in robot.out 5B 256 512 256 16 17

   argc     SAMPLE     EXPLANATION
 1 *argv[0] MLP        PROGRAM NAME MLP.EXE
 2 *argv[1] ROBOT.WTS  WEIGHTS FILE (MUST HAVE .WTS EXTENSION)
 3 *argv[2] ROBOT.IN   NAME OF INPUT SOURCE FILE (NO NAMING RESTRICTIONS)
 4 *argv[3] ROBOT.OUT  NAME OF OUTPUT SINK FILE             "
 5 *argv[4] 5B         FIRST CHARACTER:
                       / = DO NOT DISPLAY NEURAL OUTPUTS
                       0 = DISPLAY NEURAL OUTPUTS WITH NO DELAY BETWEEN THEM
                       1 - 9 = DISPLAY NEURAL OUTPUTS EVERY 1 - 9 SECONDS
                       SECOND CHARACTER:
                       B ACTIVATES A BEEP EVERY 10 SECONDS WHEN PROGRAM
                       HAS FINISHED
 6 *argv[5] 256        NUMBER OF INPUT CHANNELS TO THE NETWORK
 7 *argv[6] 512        NUMBER OF NEURONS IN LAYER 1
 8 *argv[7] 256        NUMBER OF NEURONS IN LAYER 2
 9 *argv[8] 16         NUMBER OF NEURONS IN LAYER 3 (OUTPUTS FROM THE NETWORK)
10 *argv[9] 17         SEED VALUE FOR THE RANDOM SERIES GENERATOR THAT
                       INITIALISES THE WEIGHTS FOR TRAINING

   FOR NORMAL RUNNING: ARGS 0 TO 3 MANDATORY, ARG 4 OPTIONAL
   FOR TRAINING RUNS:  ARGS 0 TO 8 MANDATORY, ARG 9 OPTIONAL */

/* ALLOCATE MEMORY FOR NEURAL INPUT, OUTPUT, ERROR, WEIGHT, MOMENTUM TERM
   AND TARGET OUTPUT ARRAYS AND LOAD-IN NEURAL INPUT WEIGHTS */
int Walloc(void) {   //returns TRUE or FALSE for success or failure
   int nl,           //layer number
       w = 0,        //weight value (its sign alternates between seedings)
       NI = 0;       //number of inputs per neuron = width of previous layer
   char *S[2] = {"run", "train"}, *s; //for error messages
   srand(SeedVal);   //initialise random number generator rand()
   for(nl = 0; nl < NL; nl++) {  //for each network layer...
      int ni, nn, NN;            //input number, neuron number, layer size
      /* In training mode the layer sizes were already set from the command
         line (the weights file is open for writing only); in normal mode,
         read each layer size from the weights file. */
      if(Training) NN = *(N + nl);
      else NN = (*(N + nl) = getw(Wfh));
      if(NN < 1 || NN > 4096) goto B;
      //Exit if unable to allocate & set pointer to this layer's outputs array.
      if((*(L + nl) = (short *)malloc(NN * sizeof(short))) == NULL) goto A;
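      /* Commentary (added; not in the original source): the weight store
         built below is a jagged three-level structure. W[nl] points to an
         array of NN pointers, one per neuron in layer nl; each of those
         points to an array of NI shorts, one weight per input from the
         previous layer, so an individual weight is reached as
         W[nl][nn][ni]. M[] is given exactly the same shape to hold the
         per-weight delta-weight (momentum) terms used during training. */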
      if (nl) { //provided we are not doing the input layer...
         short *pw,   //pointer to a weight or delta-weight
               **ppw; //pointer to a pointer to a weight or delta-weight
         //allocate & set pointer to this layer's weights pointer array
         if(*(W + nl) = (ppw = (short **)malloc(NN * sizeof(short *)))) {
            for(nn = 0; nn < NN; nn++) {
               //allocate storage for & set pointer to this neuron's weights
               if(*(ppw + nn) = (pw = (short *)malloc(NI * sizeof(short)))) {
                  for(ni = 0; ni < NI; ni++) { //for each input to this neuron
                     /* If in training mode, generate the next random
                        seed-weight (alternating its sign); if in normal
                        mode, input the next connection weight; then store
                        the weight in its appropriate array element. */
                     if(Training) {
                        if(w < 0) w = rand();
                        else w = -rand();
                     } else w = getw(Wfh);
                     *(pw + ni) = w;
                  }
               } else goto A;
            }
         } else goto A;
         if(Training) { //if in training mode...
            /* If we are doing the output layer, exit if unable to allocate
               & set the pointer to the target output array. */
            if(nl == NAL)
               if((pt = (short *)malloc(NN * sizeof(short))) == NULL) goto A;
            //Exit if unable to allocate & set pointer to the errors array.
            if((*(E + nl) = (short *)malloc(NN * sizeof(short))) == NULL)
               goto A;
            //if able to allocate & set pointer to the delta-weights ptr array
            if(*(M + nl) = (ppw = (short **)malloc(NN * sizeof(short *)))) {
               /* then for each neuron in this layer, if possible, allocate
                  storage for & set the pointer to its delta-weights */
               for(nn = 0; nn < NN; nn++) {
                  if(*(ppw + nn) = (pw = (short *)malloc(NI * sizeof(short)))) {
                     //for each input to this neuron, clear its delta-weight
                     for(ni = 0; ni < NI; ni++) *(pw + ni) = 0;
                  } else goto A;
               }
            } else goto A;
         }
      }
      //Set this layer's height as next layer's weights array width.
      NI = NN;
   }
   return(TRUE);
A: if(Training) s = *(S + 1); else s = *S;
   printf("\nInsufficient memory to %s the network as specified.", s);
   return(FALSE);
B: printf("\nBad data in weights file.");
   return(FALSE);
}

// DISPLAYS THE OUTPUT VALUE OF A GIVEN NEURON IN A GIVEN LAYER
void ShowOut(int nl, int nn, short o) {
   #define X 50 //offset of neural outputs table from screen-left
   #define Y 3  //offset of FIRST ROW of neural outputs table from screen-top
   static int flag; //first-time-through flag
   if(flag == 0) {
      flag = 1;
      _settextposition(Y - 2, X); printf("--------NEURAL OUTPUTS--------");
      _settextposition(Y - 1, X); printf("LAYER0 LAYER1 LAYER2 LAYER3");
   }
   _settextposition(Y + nn, X + nl * 8);
   printf("%6d", o);
}

void mlp(void) {     //MULTI-LAYER PERCEPTRON NETWORK FUNCTION
   int nl;           //index number of current network layer
   short *po = *L;   //pointer to input array
   for (nl = 1; nl < NL; nl++) {  //for each layer of the network...
      int nn, NN = *(N + nl);     //number of neurons in this layer
      short *pi = po,             //pointer to current layer's inputs array
            **ppw = *(W + nl);    //pointer to this layer's weights ptr array
      po = *(L + nl);             //pointer to this layer's outputs array
      for (nn = 0; nn < NN; nn++) { //for each neuron in the layer...
         int ni, a, o, s,           //input number, activation, output, sign
             NI = *(N - 1 + nl);    //number of inputs to this neuron
         short *pw = *(ppw + nn);   //pointer to neuron's first input weight
         long Hi = 0, Lo = 0;   //Hi & Lo act as a split 48-bit accumulator
         /* For each input to this neuron... [see the narrative on the
            weighted input summation and sigmoid functions] */
         for (ni = 0; ni < NI; ni++) {
            long P = (long)*(pi + ni) * *(pw + ni);
            Hi += P >> 16;
            Lo += P & 0xFFFF;
         }
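         /* Commentary (added; not in the original source): Hi and Lo
            together hold the 48-bit sum S = (Hi << 16) + Lo of the NI
            products, each of which may approach R*R ~ 2^30 and so would
            soon overflow a single 32-bit long. The activation is the
            average product rescaled into the range of a short:
               a = S / (NI * 2^15)
                 = ((Hi << 16) + Lo) / (NI * 2^15)
                 = ((Hi << 1) + (Lo >> 15)) / NI   (to within rounding)
            which is exactly the expression applied below. */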
         /* Form the final long value, preserving its sign; if it is
            negative, work with its magnitude. */
         if ((s = (a = ((Hi << 1) + (Lo >> 15)) / NI)) < 0) a = -a;
         /* Find which two look-up table entries a lies between and
            interpolate between them to get the true value of f(a). */
         o = *(SigTab + (ni = a >> 5));
         o += ((*(SigTab + ni + 1) - o) * (a & 0x1F)) >> 5;
         //if the original sign of the activation was negative, negate o
         if (s < 0) o = -o;
         *(po + nn) = o; //store neuron's output in the layer's outputs array
         //display the output value (for use during development only)
         if(delay != -1) ShowOut(nl, nn, o);
      }
   }
}

void mlptrain (  //MULTI-LAYER PERCEPTRON DELTA-RULE TRAINING FUNCTION
   int h,        //shift factor corresponding to the weight gain term (eta)
   int a         //shift factor corresponding to the momentum factor (alpha)
) {
   int nl;           //current layer number
   short *pi = *L;   //pointer to the input pattern array
   h += 15;          //combine the gain shift with the divide-by-R (2^15) shift
   for (nl = NAL; nl > 0; nl--) { //for each layer, output layer first...
      short **ppw = *(W + nl),    //pointer to access this layer's weights
            **ppm = *(M + nl),    //pointer to this layer's delta-weights
            *pe = *(E + nl),      //pointer to this layer's output errors
            *po = *(L + nl);      //pointer to this layer's neural outputs
      int nn, ni,                 //neuron number, input number to neuron
          NN = *(N + nl),         //number of neurons in the current layer
          NI = *(N - 1 + nl);     //number of inputs to this layer
      /* If doing the output layer, prime each element of the error array
         with -(t[j] - o[j]). */
      if (nl == NAL)
         for (nn = 0; nn < NN; nn++)
            *(pe + nn) = *(po + nn) - *(pt + nn);
      pi = *(L + nl - 1); //pointer to start of this layer's inputs
      //compute the output error for each neuron in this layer...
      for (nn = 0; nn < NN; nn++) {
         short m,                 //momentum term
               *pw = *(ppw + nn), //pointer to neuron's first weight
               *pm = *(ppm + nn); //pointer to neuron's first delta-weight
         //this neuron's output signal and output error
         long o = *(po + nn),
              e = (((R + o) * (R - o)) >> 15) * *(pe + nn) >> 13;
         if (e > R) e = R;    //constrain it to within
         if (e < -R) e = -R;  //the range of a short
         *(pe + nn) = e;      //dF/da = do/da * last time's summation
         for (ni = 0; ni < NI; ni++) //adjust each input weight
            *(pw + ni) += (*(pm + ni) = ((m = *(pm + ni)) >> a)
                        - (((e * *(pi + ni)) / NI) >> h));
      }
      if (nl > 1) {
         /* Provided we haven't yet reached the first active layer, prime
            the previous layer's error array elements with this layer's
            error * weight summations. */
         short *ps = *(E + nl - 1); //ptr to previous layer's output errors
         for (ni = 0; ni < NI; ni++) { //for each input to this layer...
            //See mlp() for an explanation of the following block of code.
            long Hi = 0, Lo = 0;
            for (nn = 0; nn < NN; nn++) {
               long P = (long)*(pe + nn) * *(*(ppw + nn) + ni);
               Hi += P >> 16;
               Lo += P & 0xFFFF;
            }
            *(ps + ni) = ((Hi << 1) + (Lo >> 15)) / NN;
         }
      }
   }
}
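/* Commentary (added; not in the original source): the error term formed in
   mlptrain() above,
      e = (((R + o) * (R - o)) >> 15) * *(pe + nn) >> 13,
   is the fixed-point form of the delta rule. For a sigmoid scaled to
   outputs in [-R, R], the derivative is proportional to (R + o)(R - o),
   the integer counterpart of the familiar (1 - o*o) factor; the >> 15
   rescales that product back into short range, and the final >> 13 is a
   scaling constant of the original fixed-point design. Each weight is then
   moved by the momentum term (the previous delta-weight >> a) minus the
   gain-shifted error-times-input term. */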
int GetInp(void) { //GET THE NEXT INPUT PATTERN FROM THE INPUT STREAM
   int i, nn, NN = *N; //input value, input number, total number of inputs
   short *pi = *L;     //points to the network input array
   for(nn = 0; nn < NN; nn++) {      //for each input channel...
      *(pi + nn) = (i = getw(Ifh));  //get next input
      if(ferror(Ifh)) {              //if an error occurred...
         //show standard system error message prefixed by "Input source"
         perror("\nInput source");
         //clear possible error previously set against this stream
         clearerr(Ifh);
         return(FALSE); //cop out of the loop and return a FAIL indication
      }
      if(delay != -1) ShowOut(0, nn, i); //display the input if required
   }
   return(TRUE); //return successfully
}

// GET THE NEXT TARGET OUTPUT PATTERN FROM THE TARGET OUTPUT STREAM
int GetOut(void) {
   int i, nn,           //target value, output number
       NN = *(N + NAL); //total number of output-layer neurons
   short *pi = pt;      //points to the target output array
   for(nn = 0; nn < NN; nn++) {      //for each output channel...
      *(pi + nn) = (i = getw(Ofh));  //get next target output
      if(ferror(Ofh)) {              //if an error occurred...
         //show prefixed standard system error message
         perror("\nTarget output source");
         //clear possible error previously set against this stream
         clearerr(Ofh);
         return(FALSE); //cop out of the loop and return a FAIL indication
      }
      if(delay != -1)   //display the target output if required
         ShowOut(4, nn, i);
   }
   return(TRUE); //return successfully
}

int PutOut(void) { //PUT CURRENT OUTPUT TO THE OUTPUT STREAM
   int nn,              //current output number
       NN = *(N + NAL); //total number of outputs
   short *po = *(L + NAL); //points to the output layer's outputs array
   for(nn = 0; nn < NN; nn++) { //for each output channel...
      putw(*(po + nn), Ofh);    //store this output
      if(ferror(Ofh)) {         //if an error is indeed found to have occurred
         //show standard system error message prefixed by "Output sink"
         perror("\nOutput sink");
         clearerr(Ofh); //clear possible error previously set against stream
         return(FALSE); //cop out of the loop and return a FAIL indication
      }
   }
   return(TRUE); //return successfully
}
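/* Commentary (added; not in the original source): all three streams are raw
   binary sequences of 16-bit words read and written with getw() and putw(),
   which transfer an int (2 bytes under the 16-bit DOS compilers this
   program targets). An input file is therefore NR consecutive patterns of
   N[0] words each, which is why main() computes
   NR = filelength / N[0] / 2. On a compiler with 4-byte ints this
   word-size assumption would no longer hold. */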
// DISPLAY THE WEIGHTS ARRAYS AFTER MEMORY ALLOCATION AND LOADING
void ShowWeights(void) {
   int nl; //network layer number
   _settextposition(11, 1);
   printf("Neurons per layer:\n");
   for(nl = 0; nl < NL; nl++) printf("N[%d] = %d ", nl, *(N + nl));
   printf("\nWEIGHTS:");
   for(nl = 1; nl < NL; nl++) { //for each network layer
      int nn, NN = *(N + nl);
      short **ppw = *(W + nl);  //point to appropriate *Wn[]
      for(nn = 0; nn < NN; nn++) {   //for each neuron in layer
         int ni, NI = *(N - 1 + nl); //number of inputs to neuron
         short *pw = *(ppw + nn);    //points to appropriate Wnn[]
         printf("\nW%d%d[] = ", nl, nn);
         for(ni = 0; ni < NI; ni++)  //for each input to neuron
            printf("%5d ", *(pw + ni));
      }
   }
}

// STORE THE WEIGHTS IN THE .WTS FILE ON COMPLETION OF TRAINING RUN
void SaveWeights(void) {
   int nl;         //network layer number
   putw(*N, Wfh);  //store the number of network input channels
   for(nl = 1; nl < NL; nl++) { //for each network layer
      int nn, NN = *(N + nl);   //neuron number
      short **ppw = *(W + nl);  //point to appropriate *Wn[]
      putw(NN, Wfh);            //store number of neurons in layer
      for(nn = 0; nn < NN; nn++) {   //for each neuron in layer
         int ni, NI = *(N - 1 + nl); //number of inputs to neuron
         short *pw = *(ppw + nn);    //points to appropriate Wnn[]
         for(ni = 0; ni < NI; ni++)  //for each input to neuron
            putw(*(pw + ni), Wfh);   //store the weight
      }
   }
}

// DISPLAYS NUMBER OF NEURONS IN EACH LAYER OF THE NETWORK
void ShowLayers(void) {
   int i;
   /* for each layer, set the line for display of the
      'number of nodes in layer' message */
   for(i = 0; i < 4; i++) {
      _settextposition(6 + i, 1);
      printf("Nodes in Layer%d %d", i, *(N + i));
   }
}

void ShowTrainPass(void) { //DISPLAYS THE NUMBER OF THE TRAINING DATA PASS
   static int x; //pass counter
   if(x == 0) {
      _settextposition(9, 1);
      printf("Training Data Pass No:");
   }
   _settextposition(9, 24);
   printf("%d", ++x);
}

// DISPLAYS THE NUMBER OF THE INPUT PATTERN CURRENTLY BEING PROCESSED
void ShowPatCnt(void) {
   static int x; //pattern counter
   if(x == 0) {
      _settextposition(10, 1);
      printf("Total No of Patterns : %d\n", NR);
      printf("Processing Pattern No:");
   }
   _settextposition(11, 24);
   printf("%d", ++x);
}

// DISPLAYS THE MAGNITUDE OF THE ERROR FUNCTION FOR CURRENT PATTERN
void ShowErrFun(int EF) {
   static int x; //first-time-through flag
   if(x == 0) {
      x = 1;
      _settextposition(12, 1);
      printf("Error Function Value:");
   }
   _settextposition(12, 24);
   printf("%d", EF);
}

// RETURNS MAGNITUDE OF ERROR FUNCTION FOR CURRENT INPUT PATTERN
short GetErrFun(void) {
   int nn,              //current output-layer neuron
       NN = *(N + NAL); //number of neurons in the output layer
   long Hi = 0, Lo = 0; //Hi & Lo act as a split 48-bit accumulator
   //points to first element of the output layer's outputs array
   short *po = *(L + NAL);
   for (nn = 0; nn < NN; nn++) { //for each neuron in the output layer...
      //square of (target output) - (actual output) for this neuron
      long e, E = (e = *(pt + nn) - *(po + nn)) * e;
      Hi += E >> 16;    //accumulate the squares in the
      Lo += E & 0xFFFF; //split 48-bit accumulator
   }
   //the mean of the squares, rescaled and returned as a short
   return((Hi + (Lo >> 16)) / NN);
}

// MANAGES THE MLP'S BACK-PROPAGATION DELTA-RULE TRAINING PROCESS
int TrainingManager(void) {
   #define AlphaShift 1 //make momentum half of last time's delta-weight
   /* The maximum acceptable magnitude of the error function for the
      neural network to be considered trained. */
   #define AcceptedError 1
   //right-shift equivalent of the gain term denoted by the Greek letter eta
   static int NetaShift;
   for(;;) { //while error is non-zero for at least one training example...
      int nr,          //number of [next] pattern
          flag = TRUE; //'acceptably low errors' flag
      short EF;        //error function
      //increment & show the number of this pass of the training data file
      ShowTrainPass();
      for(nr = 0; nr < NR; nr++) { //for each pattern in the input file...
         //bail out of loop if abort keystroke detected
         if(!KbDelay(nr)) return(FALSE);
         ShowPatCnt();  //show the number of the pattern being processed
         if(!GetInp()) return(FALSE); //get the next input pattern
         mlp();                       //present it to the network
         if(!GetOut()) return(FALSE); //get the next target output pattern
         ShowErrFun(EF = GetErrFun());//compute & display the error function
         /* If the error is not yet small enough to be acceptable, indicate
            that the error function is still too high & adjust the weights. */
         if(EF > AcceptedError) {
            flag = FALSE;
            mlptrain(NetaShift, AlphaShift);
         }
      }
      //break out of the for(;;) loop if flag remained true for all patterns
      if(flag == TRUE) break;
      if(NetaShift < 12) NetaShift += 1; //reduce the gain factor eta
      rewind(Ifh); //back to start of training input patterns
      rewind(Ofh); //back to start of training output patterns
   }
   SaveWeights(); //save the weights values for the trained network
   return(TRUE);
}
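/* Commentary (added; not in the original source): TrainingManager() keeps
   re-presenting the whole training file until every pattern's error
   function is within AcceptedError. After each pass that still contains a
   failing pattern, NetaShift is incremented (up to a cap of 12), halving
   the effective gain term eta each time: in effect a crude annealing
   schedule. AlphaShift = 1 fixes the momentum contribution at half of the
   previous delta-weight throughout training. */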
int SigLoad(void) { //LOAD THE SIGMOID FUNCTION'S LOOK-UP TABLE
   int i;
   FILE *fh;
   _settextposition(2, 1);
   printf("Loading Sigmoid look-up table");
   _settextposition(2, 1);
   /* if the Sigmoid table file can be found and opened, load each value
      in the table into the SigTab[] array */
   if(fh = fopen("SIGMOID.DAT", "rb")) {
      for (i = 0; i < 1025; i++) *(SigTab + i) = getw(fh);
      fclose(fh);
      printf("Sigmoid look-up table loaded.");
      return(TRUE);
   }
   printf("Cannot find/load Sigmoid look-up table file sigmoid.dat");
   return(FALSE);
}

int KbDelay(int nr) { //Ctrl-C BREAK DETECTOR AND INTER-PATTERN DELAY TIMER
   if(kbhit()) {            //if a keyboard key has been struck
      if(getch() == '\3') { //if it was a Ctrl-C break character
         printf("\nProgram aborted by keystroke."); //display error message
         return(FALSE);                             //and set error indication
      }
   } else { //else (keyboard was not struck)
      /* if monitoring is required and this is not the first pass, set the
         target time to the current time + delay, then, while the current
         time has not yet reached the target time, wait */
      if(delay != -1 && nr > 0) {
         time_t t = time(NULL) + delay;
         while(time(NULL) < t);
      }
   }
   return(TRUE); //signal OK to proceed
}

void BeepDelay(void) { //BEEP EVERY 10 SECONDS WHEN PROGRAM HAS FINISHED
   int flag = TRUE;
   printf("\nPress any key to exit.");
   while(flag) {
      time_t t = time(NULL) + 10; //set target time to current time + delay
      printf("\07");              //beep
      //while the current time has not yet reached the target time, wait
      while(time(NULL) < t && flag)
         if(kbhit()) flag = FALSE; //if a key is hit, set the exit flag
   }
}

// VALIDATES WEIGHTS, INPUT & OUTPUT FILESPECS FROM COMMAND LINE
char *CheckFileName (
   char *s, //points to the input argument string
   char *e  //points to the prescribed file extension string
) {
   int i,   //character counter
       b,   //backslash location
       d,   //dot location
       c;   //ASCII character
   static char S[80]; //storage for the validated argument
   /* For each character in the input argument, locate the dot within the
      file name and the last backslash in the filespec. */
   for (i = 0, b = 0, d = 0; *(S + i) = (c = *(s + i)); i++) {
      if (c == '.' && d == 0) d = i;
      if (c == '\\') b = i + 1;
   }
   if (i - b < 1 || i - b > 12) //exit if too long or too short
      return(NULL);
   if (d) {                        //if the file name contains a dot
      if (d - b > 8) return(NULL); //no more than 8 chars allowed before dot
      if (*e == '\0' && d == i - 1)//if a specific extension is not required,
         *(S + d) = '\0';          //change a trailing dot to a null character
      else while (c = *e++) {      //for each char of the prescribed extension
         int x;
         //if the character is a lower-case letter, convert it to upper case
         if ((x = *(S + ++d)) > 96 && x < 123) x -= 32;
         if (c != x)      //if it differs from the prescribed extension char,
            return(NULL); //return 'invalid filespec'
      }
   } else { //else (no dot was found)
      if (i - b > 8) return(NULL); //a naked filename cannot exceed 8 chars
      if (*e) {                    //if a specific extension is required,
         *(S + i++) = '.';         //replace the null character with a dot and
         //add each char of the prescribed extension & the terminating null
         while (*(S + i++) = *e++);
      }
   }
   return(S);
}
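/* Commentary (added; not in the original source): CheckFileName() enforces
   DOS 8.3 naming. For example, with the prescribed extension "WTS":
   "robot.wts" passes (the extension is up-cased before comparison), and
   "robot" passes with ".WTS" appended; but "longrobotname.wts" (more than
   8 characters before the dot) and "robot.dat" (wrong extension) are
   rejected. Where no extension is prescribed (the input and output
   streams), any extension is accepted. */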
// CHECK AND PROCESS THE NON-FILESPEC COMMAND LINE ARGUMENTS
int CheckNonFileArgs(int argc, char **argv) {
   int i, n, result = TRUE; //index, number of neurons, result flag
   char *s;
   if(argc > 4) { //if a 4th argument is present on the command line...
      /* delay = number of seconds to wait between processing patterns;
         if the entered delay is out of range, set it to -1 to indicate
         that the neural outputs are not to be displayed */
      if((delay = *(s = *(argv + 4)) - 48) < 0 || delay > 9) delay = -1;
      //determine whether or not to beep every 10 seconds when finished
      BeepFlag = (*(s + 1) == 'B');
   }
   //if more arguments are present, assume a training run is intended
   if(argc > 5) {
      if(argc < 9) { //4 more arguments are mandatory for training
         printf("\nWrong number of arguments for a training session.");
         return(FALSE);
      }
      /* For each 'training' argument, get the number of neurons in this
         layer and display this number as a message in the terminal. */
      for(Training = TRUE, i = 0; i < 4; i++) {
         *(N + i) = (n = atoi(*(argv + i + 5)));
         _settextposition(6 + i, 1);
         printf("Nodes in Layer%d %d", i, n);
         /* if the entered number of neurons is out of range, indicate an
            invalid number of neurons in at least one layer */
         if(n < 1 || n > 4096) {
            printf(" - invalid.");
            Training = FALSE;
         }
      }
      //exit if an invalid number of neurons was specified
      if(!Training) return(FALSE);
   }
   //input the random weights generator seed value if present
   if(argc > 9) SeedVal = atoi(*(argv + 9));
   return(result);
}

// CHECK FILESPECS ENTERED ON THE COMMAND LINE AND OPEN THE FILES
int CheckFileArgs(int argc, char **argv) {
   char *Text[] = { //annotation text for filename validation messages
      "Application File:",
      "Input Stream:    ",
      "Output Stream:   "
   },
   //mandatory extensions for application, input and output filenames
   *Extn[] = {"WTS", "", ""},
   //read/write modes in which each of the above files must be opened
   *Mode[] = {"rb", "rb", "wb"},
   //ditto for when the network is being trained
   *TMode[] = {"wb", "rb", "rb"},
   *s, **m; //pointers to validated filespec & appropriate file mode array
   int i, result = TRUE; //index, result flag
   FILE *FH[3]; //array for the weights, input and output file pointers
   /* if fewer than 3 arguments (excluding the program EXE filename) are
      present, display an error message and exit FALSE */
   if(argc < 4) {
      printf("\nNot enough command line arguments.");
      return(FALSE);
   }
   if(Training) m = TMode; //set to open the files in the modes
   else m = Mode;          //required for training or normal running
   for (i = 0; i < 3; i++) { //for each of the 3 filespec arguments...
      //if this filespec is valid...
      if (s = CheckFileName(*(argv + i + 1), *(Extn + i))) {
         /* if a valid file handle is returned, show the name of the
            successfully opened file */
         if (*(FH + i) = fopen(s, *(m + i)))
            printf("\n%s %s", *(Text + i), s);
         else { //else indicate that the file opening phase has failed
            printf("\nCould not find/open %s %s.", *(Text + i), s);
            result = FALSE;
         }
      } else { //else indicate that the filename was invalid
         printf("\nArg %d: invalid file name. ", i + 1);
         if (*(s = *(Extn + i))) //if a definite filename extension is required
            printf(" .%s extension expected (and assumed when omitted).", s);
         result = FALSE; //indicate that the file opening phase has failed
      }
   }
   Wfh = *FH;       //set pointer to weights file handle
   Ifh = *(FH + 1); //set pointer to input file handle
   Ofh = *(FH + 2); //set pointer to output file handle
   return(result);
}
// command line arguments are explained at the front of this listing
int main(int argc, char *argv[]) {
   _clearscreen(_GWINDOW);               //clear the screen
   printf("EBS MULTI-LAYER PERCEPTRON"); //display the program title
   /* If the Sigmoid function's look-up table is found and loaded OK
      AND all non-filespec arguments are valid
      AND all filespec arguments are valid AND all files found & opened
      AND memory allocated and weights loaded OK... */
   if(SigLoad() && CheckNonFileArgs(argc, argv)
   && CheckFileArgs(argc, argv) && Walloc()) {
      _settextposition(11, 1); //set it to (20,1) if ShowWeights() is active
      //total number of patterns in the input file
      NR = (filelength(fileno(Ifh)) / (*N)) >> 1;
      //if in training mode (ie. layer sizes were given on the command line)
      if(Training) {
         if(!TrainingManager()) //if training was unsuccessful
            printf("\nTraining process failed.");
      } else { //else (normal non-training mode)...
         int nr; //number of [next] pattern
         ShowLayers();
         _settextposition(11, 1); //set it to (20,1) if ShowWeights() is active
         for(nr = 0; nr < NR; nr++) { //for each pattern in the input file...
            //check for an abortive keystroke & apply the inter-pattern delay
            if(!KbDelay(nr)) break;
            ShowPatCnt();        //show the number of the pattern in process
            if(!GetInp()) break; //get the next input pattern
            mlp();               //pass it through the multi-layer perceptron
            if(!PutOut()) break; //send the corresponding output pattern
         }
      }
      _settextposition(13, 1);
      printf("Program finished.");
      if(BeepFlag) BeepDelay(); //keep beeping every 10 seconds when finished
   } else //one or more of the initialisation processes failed
      printf("\nCould not continue.");
   return(0);
}
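/* APPENDIX (added; not part of the original program): the generator for
   SIGMOID.DAT is not shown in this listing. The sketch below is one
   plausible way to build the 1025-entry table that SigLoad() expects:
   mlp() indexes the table with (a >> 5) for activations a in [0, R] and
   re-applies the sign afterwards, so the table need only hold f(a) for
   non-negative a, with f(0) = 0 and f saturating towards R. The tanh()
   form and its slope constant of 2 are assumptions, not taken from the
   original program. */
#if 0 //compile separately as a stand-alone utility, e.g. siggen.c
#include <stdio.h>
#include <math.h>

int main(void) {
   FILE *fh = fopen("SIGMOID.DAT", "wb");
   int i;
   if (fh == NULL) return 1;
   for (i = 0; i <= 1024; i++) {           //1025 entries, indices 0..1024
      double a = (i << 5) / 32767.0;       //activation rescaled to [0, 1]
      double f = tanh(2.0 * a);            //assumed slope constant of 2
      putw((int)(f * 32767.0), fh);        //16-bit word, as getw() expects
   }
   fclose(fh);
   return 0;
}
#endif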