/* =======================================================
   Neural Network Classes for the NeXT Computer
   Written by: Ralph Zazula
   University of Arizona - Fall 1991
   zazula@pri.com (NeXT Mail)
   ==========================================================*/
//
// This feed-forward, back-propagation network is set up to learn the
// identity function (output = input).  N is the number of inputs (and
// outputs) and H is the number of hidden nodes.  The case H < N is the
// most interesting one: the network is then forced to perform a form of
// data compression.  P is the number of random patterns to learn.
//

#import "BackPropEngine.h"
#import "Random.h"      // header for the Random class used below (file name assumed)
#import <stdio.h>

#define N 4
#define H 8
#define P 10

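//
// NOTE: with the values above, H (8) is larger than N (4), so the hidden
// layer is wider than the input and no compression takes place.  To see
// the data-compression case described in the header comment, define H to
// be smaller than N (e.g. 2).
//
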
void main()
{
   //
   // create a Back-Prop network
   //
   id bp = [[BackPropEngine alloc] initWithInputs:N
                                           hidden:H
                                          outputs:N];

   id random = [[Random alloc] init];   // allocate a Random instance

   double inputs[P][N];
   double target[P][N];

   int i, j;

   [bp setEta:0.9];                     // set the learning rate

   //
   // create the random patterns
   //
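   // ([random percent] presumably returns a value in [0,1) - an assumption
   //  about the Random class - so the 0.8 factor below keeps the targets
   //  away from the upper end of the presumably-sigmoid output range.)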
   for(j=0; j<P; j++)
      for(i=0; i<N; i++)
         inputs[j][i] = target[j][i] = [random percent]*0.8;

   //
   // start the learning loop
   //
   for(j=0; j<100000; j++) {
      for(i=0; i<20; i++) {
         [bp applyInput:inputs[j%P]];
         [bp correctWithTarget:target[j%P]];
      }
      if(!(j % P))                             // BPE doesn't average the errors -
         printf("%d %e\n", j, [bp getError]);  // that should be done here...
   }
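
   //
   // Per the note above, averaging the error over the P patterns would give
   // a smoother learning curve than printing the single-pattern error.
   // A minimal sketch (assuming -getError returns the error for the most
   // recently presented pattern): declare "double errSum = 0.0;" alongside
   // i and j, then inside the outer loop use
   //
   //    errSum += [bp getError];
   //    if(!(j % P)) {
   //       printf("%d %e\n", j, errSum/P);   // average over the last P patterns
   //       errSum = 0.0;
   //    }
   //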

   //
   // dump the weights
   //
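   // First block: one row per input node, one column per hidden node
   // (input-to-hidden weights).  Second block: one row per hidden node,
   // one column per output node (hidden-to-output weights).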
   for(j=0; j<[[bp inputs] count]; j++) {
      for(i=0; i<[[bp hidden] count]; i++)
         printf("%10e ", [[[bp hidden] objectAt:i]
                             getWeightFor:[[bp inputs] objectAt:j]]);
      printf("\n");
   }
   printf("-----\n");
   for(j=0; j<[[bp hidden] count]; j++) {
      for(i=0; i<[[bp outputs] count]; i++)
         printf("%10e ", [[[bp outputs] objectAt:i]
                             getWeightFor:[[bp hidden] objectAt:j]]);
      printf("\n");
   }
}