Neural net backpropagation: all outputs tend to 1 and all weights only increase in value (Java)




So I am trying to create my own neural network project to challenge myself. I have checked the math and made sure I am using the correct backpropagation algorithm, but the net keeps outputting numbers that tend to 1 and the weights only ever increase. I have gone over the math again and am unable to find the error. Any help or tips would be appreciated, and sorry for the bad variable names, I am not a creative person when it comes to this sort of stuff.
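For reference, here are the update equations I think I am implementing (the standard ones for a sigmoid unit with squared error, written in my own notation, so treat this as a sketch of what I mean by "the math" rather than anything official):

\[
\delta_k = (o_k - t_k)\,o_k(1 - o_k), \qquad
\delta_j = \Big(\sum_k \delta_k w_{jk}\Big)\,o_j(1 - o_j), \qquad
w_{ij} \leftarrow w_{ij} - \eta\,\delta_j\,o_i
\]

where t is the target, o is a neuron's output, and eta is the learning rate; the output-layer deltas use the first formula and the hidden-layer deltas use the second.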

The main runner class:

//in the runner class (needs import java.util.Random;)
public static void main(String[] args) {
    net tester = new net();
    int[] arrangement = {1, 3, 1};
    double[] lister = new double[1];
    double[][] inputvalues = {{1.0}, {0.0}};
    double[][] expected = {{1.0}, {0.0}};
    double[] returned = null, outstored = new double[1000], expectedstored = new double[10000000];
    Random rand = new Random();
    int storer = 0, inter = 0;
    tester.settup(arrangement);

    for(int j = 0; j < 10000; j++) {
        //picks a random training sample, runs it forward, then backpropagates
        storer = rand.nextInt(inputvalues.length);
        returned = tester.feedforward(inputvalues[storer]);
        tester.backprop(expected[storer], 0.1);

        //thresholds the output at 0.5 for logging
        if(returned[0] >= 0.5) {
            inter = 1;
        } else {
            inter = 0;
        }

        outstored[outstored.length-1] = inter;
        expectedstored[outstored.length-1] = expected[storer][0];
        System.out.println("expected " + expected[storer][0] + " got " + returned[0]);
    }
}
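In case it helps, this is roughly how I check the net once the loop finishes (just a quick sketch added here for illustration, it only calls the feedforward method shown further down):

//quick check after the training loop (sketch only, not part of any class)
double[] outforone = tester.feedforward(new double[]{1.0});
double[] outforzero = tester.feedforward(new double[]{0.0});
System.out.println("1.0 -> " + outforone[0] + "   0.0 -> " + outforzero[0]);

Both printouts end up close to 1 no matter which input I feed in.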

The net class handles the neurons and the propagation:

public class net {
    int[] arrangement;
    neuron[][] net;
    double[] inputs;
    double[][] output;
    int biggest = 0;

    //creates the arrays and neurons of the appropriate size
    public void settup(int[] inarrangement) {

        //finds the largest layer so all the layers fit in the arrays
        this.arrangement = inarrangement;
        for(int h = 0; h < this.arrangement.length; h++) {
            this.arrangement[h] += 1;
        }

        this.biggest = 0;
        for(int i : this.arrangement) {
            if(i > this.biggest) {
                this.biggest = i;
            }
        }
        //the first row in the net array represents the starting neurons. these neurons are never assigned or run, only the hidden layers are run
        //this means every net must have at least 1 hidden layer

        //creates the arrays
        this.net = new neuron[this.arrangement.length][this.biggest];
        this.output = new double[this.arrangement.length][this.biggest];

        //fills all the positions in the net with neurons (they are null by default)
        for(int a = 1; a < this.arrangement.length; a++) {
            for(int b = 0; b < this.biggest; b++) {
                this.net[a][b] = new neuron();
            }
        }

        //sets up the neurons
        for(int i = 1; i < this.arrangement.length; i++) {
            for(int j = 0; j < this.arrangement[i]-1; j++) {
                this.net[i][j].settup(this.arrangement[i-1], false);
            }
            //sets up the bias neuron
            this.net[i][this.arrangement[i]-1].settup(this.arrangement[i-1], true);
        }
    }

    //runs the net to get an answer
    public double[] feedforward(double[] inputval) {
        this.inputs = inputval;
        //sets the first row of outputs to the inputs
        for(int y = 0; y < this.inputs.length; y++) {
            this.output[0][y] = inputs[y];
        }
        this.output[0][this.inputs.length] = 1.0;

        //runs the hidden layers and the final layer
        for(int i = 1; i < this.arrangement.length; i++) {
            for(int j = 0; j < this.arrangement[i]; j++) {
                this.output[i][j] = this.net[i][j].getout(this.output[i-1]);
            }
        }

        /*
        for(int a = 0; a < this.arrangement.length; a++) {
            for(int b = 0; b < this.biggest; b++) {
                System.out.print(output[a][b] + "  ");
            }
            System.out.println("\n\n");
        }
        */

        return this.output[this.output.length - 1];
    }

    //this follows the backpropagation algorithm and changes the weights by their respective errors
    public void backprop(double[] expected, double learningrate) {
        double error;
        double[][][] errorstor = new double[this.arrangement.length][this.biggest][this.biggest];
        double[][] deltahidden = new double[this.arrangement.length][this.biggest];

        //calculates the error for the weights connecting to the output neurons since that calculation is different
        for(int i = 0; i < this.arrangement[this.arrangement.length-1]-1; i++) {
            deltahidden[deltahidden.length-1][i] = -1 * (expected[i] - this.output[this.output.length-1][i]) * this.output[this.output.length-1][i] * (1 - this.output[this.output.length-1][i]);
            for(int j = 0; j < this.arrangement[this.arrangement.length-2]; j++) {
                error = deltahidden[deltahidden.length-1][i] * this.output[this.output.length-2][j];
                errorstor[this.net.length-1][i][j] = error;
                //this.net[this.net.length-1][i].weights[j] += error * learningrate;
            }
        }

        //calculates the error for all the other weights
        //for every layer other than the last layer and the first layer, which does not have weights
        for(int i = 1; i < this.arrangement.length - 1; i++) {

            //for every neuron
            for(int j = 0; j < this.arrangement[i]-1; j++) {
                deltahidden[i][j] = 0;
                //for every weight in the next layer connecting to this layer
                for(int h = 0; h < this.arrangement[i+1]-1; h++) {
                    deltahidden[i][j] += deltahidden[i+1][h] * this.net[i+1][h].weights[j];
                }
                deltahidden[i][j] = deltahidden[i][j] * this.output[i][j] * (1 - this.output[i][j]);

                //for every weight from this neuron to the previous layer
                for(int h = 0; h < this.arrangement[i-1]; h++) {
                    error = deltahidden[i][j] * this.output[i-1][h];
                    //System.out.println("the error for layer " + i + " and neuron " + j + " connecting to neuron " + h + " in the previous layer is " + error);
                    errorstor[i][j][h] = error;
                    //this.net[i][j].weights[h] += error * learningrate;
                }
            }
        }

        //changes all the weights at once
        //for every layer
        for(int i = 1; i < this.arrangement.length; i++) {
            //for every neuron
            for(int j = 0; j < this.arrangement[i]-1; j++) {
                //for every weight held by the neuron
                for(int h = 0; h < this.arrangement[i-1]; h++) {
                    this.net[i][j].weights[h] += errorstor[i][j][h] * learningrate;
                }
            }
        }
    }

}
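To show what I understand the weight update to be, here is a tiny standalone single-neuron version of the same 1 -> 1, 0 -> 0 task. This is only a sketch written for this post, it does not use the classes above, and all the names in it are made up for the example:

import java.util.Random;

//standalone sketch: one sigmoid neuron (one weight plus a bias) trained with
//plain gradient descent on the same 1 -> 1, 0 -> 0 task as the main loop above
public class singleneuronsketch {
    public static void main(String[] args) {
        Random rand = new Random();
        double w = rand.nextDouble(), b = rand.nextDouble(), lr = 0.1;
        double[] ins = {1.0, 0.0};
        double[] targets = {1.0, 0.0};

        for(int step = 0; step < 10000; step++) {
            int k = rand.nextInt(ins.length);
            double out = 1.0 / (1.0 + Math.exp(-(w * ins[k] + b)));  //sigmoid
            double delta = (out - targets[k]) * out * (1 - out);     //derivative of squared error w.r.t. the net input
            w -= lr * delta * ins[k];                                //step against the gradient
            b -= lr * delta;                                         //the bias input is always 1
        }

        System.out.println("1.0 -> " + (1.0 / (1.0 + Math.exp(-(w + b)))));
        System.out.println("0.0 -> " + (1.0 / (1.0 + Math.exp(-b))));
    }
}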

And the neuron class handles all of the data and calculations for forward propagation and setup:

import java.util.Random;

public class neuron {

    double e = 2.7182818284;
    int inputnum;
    double[] weights;
    double[] input;
    double output;
    boolean bias;

    //creates the arrays and creates random weights as the starting values
    //if the bias value is true the neuron is a bias neuron and will output 1 no matter what
    public void settup(int i, boolean isbias) {
        this.bias = isbias;
        if (!this.bias) {
            this.inputnum = i;
            Random rand = new Random();

            this.weights = new double[this.inputnum];
            for(int j = 0; j < this.weights.length; j++) {
                this.weights[j] = rand.nextDouble();
            }
        }
    }

    //does the neuron's calculations, finding the output based on the inputs from the previous layer
    public double getout(double[] output2) {
        if(this.bias) {
            return 1.0;
        }
        this.output = 0;
        this.input = output2;

        //weighted sum of the previous layer's outputs
        for(int j = 0; j < this.inputnum; j++) {
            this.output += (this.input[j] * this.weights[j]);
        }

        //sigmoid function
        this.output = (1.0 / (1.0 + Math.pow(e, (-1.0 * this.output))));
        return this.output;
    }

}
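One small side note I noticed while writing this up: as far as I know the sigmoid can also be written with Math.exp instead of my Math.pow(e, ...) with a hand-typed constant, something like this (a sketch of the alternative only, I left my version as it is above):

//sketch of the same sigmoid written with Math.exp
static double sigmoid(double x) {
    return 1.0 / (1.0 + Math.exp(-x));
}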

Again, any help is appreciated. I am new to AI and to Java and am not an experienced programmer, so I have no clue what the potential problem is. Thanks.




