private void initWeight() {
  // Fill the label_size x feature_size weight matrix with Gaussian noise
  // so training does not start from a symmetric (all-zero) state.
  for (int row = 0; row < label_size; row++) {
    for (int col = 0; col < feature_size; col++) {
      W[row][col] = MathUtils.gaussian();
    }
  }
}
 /**
  * Runs one stochastic-gradient step of softmax regression on a single sample.
  *
  * @param input        feature vector of length feature_size
  * @param learningRate step size for the gradient-ascent update
  * @param label        one-hot target vector of length label_size
  * @return the reconstruction error between the target and the predicted
  *         class distribution, as computed by MathUtils.reconstructError
  */
 public double train(int[] input, double learningRate, int[] label) {
   // Forward pass: linear score W[i]·input + bias[i] for each class.
   double[] predicted = new double[this.label_size];
   for (int i = 0; i < this.label_size; i++) {
     double score = 0;
     for (int j = 0; j < this.feature_size; j++) {
       score += W[i][j] * input[j];
     }
     predicted[i] = score + bias[i];
   }
   // Normalize the scores into a probability distribution (in place).
   MathUtils.softmax(predicted);
   // Gradient-ascent update, following the softmax-regression derivation at
   // http://deeplearning.stanford.edu/wiki/index.php/Softmax%E5%9B%9E%E5%BD%92
   // The per-class gradient factor is (target - predicted probability).
   for (int i = 0; i < this.label_size; i++) {
     double error = label[i] - predicted[i];
     for (int j = 0; j < this.feature_size; j++) {
       W[i][j] += learningRate * input[j] * error;
     }
     bias[i] += learningRate * error;
   }
   return MathUtils.reconstructError(label, predicted);
 }