Ejemplo n.º 1
0
  @Test
  public void testWithNull() throws Exception {

    // Verify that FeatureTemplate.description() tolerates a null attribute value
    // instead of throwing (e.g. from the freemarker template evaluation).
    SimpleFeatureSource source = getFeatureSource(MockData.BASIC_POLYGONS);
    SimpleFeatureCollection fc = source.getFeatures();
    SimpleFeatureIterator i = fc.features();
    try {
      // next() already returns SimpleFeature; no cast needed
      SimpleFeature f = i.next();

      FeatureTemplate template = new FeatureTemplate();
      // Sanity run with all attributes populated
      template.description(f);

      // Null out an attribute and make sure the template still renders
      f.setAttribute(1, null);
      try {
        template.description(f);
      } catch (Exception e) {
        e.printStackTrace();
        fail("template threw exception on null value");
      }

    } finally {
      // Always release the underlying feature stream
      i.close();
    }
  }
Ejemplo n.º 2
0
 /**
  * Reads feature templates from the given stream and registers each one in the
  * global template map, keyed by template name.
  *
  * @param input the input stream from which to read the feature templates
  */
 public static void s_readFeatureTemplates(InputStream input) {
   try {
     ArrayList<FeatureTemplate> loaded = FeatureTemplateLoader.s_readTemplates(input);
     // Guard the shared map while bulk-registering the parsed templates
     synchronized (s_features) {
       loaded.forEach(template -> s_features.put(template.getName(), template));
     }
   } catch (IOException e) {
     logger.error("IOException while reading device features", e);
   } catch (ParsingException e) {
     logger.error("Parsing exception while reading device features", e);
   }
 }
Ejemplo n.º 3
0
  @Test
  public void testAlternateLookup() throws Exception {
    // Check that template lookup falls back to the alternate class's package
    // when resolving "dummy.ftl".
    SimpleFeatureSource source = getFeatureSource(MockData.PRIMITIVEGEOFEATURE);
    SimpleFeatureCollection collection = source.getFeatures();
    SimpleFeatureIterator iterator = collection.features();
    try {
      SimpleFeature feature = iterator.next();

      FeatureTemplate featureTemplate = new FeatureTemplate();
      String rendered = featureTemplate.template(feature, "dummy.ftl", Dummy.class);

      assertEquals("dummy", rendered);
    } finally {
      // Release the underlying feature stream
      iterator.close();
    }
  }
Ejemplo n.º 4
0
  @Test
  public void testRawValue() throws Exception {
    // Smoke test: the rawValues.ftl template must render without throwing.
    SimpleFeatureSource source = getFeatureSource(MockData.PRIMITIVEGEOFEATURE);
    SimpleFeatureCollection fc = source.getFeatures();
    SimpleFeatureIterator i = fc.features();
    try {
      // next() already returns SimpleFeature; no cast needed
      SimpleFeature f = i.next();

      FeatureTemplate template = new FeatureTemplate();
      // Any exception simply propagates — the method declares `throws Exception`
      // and JUnit reports the full stack trace, so the old catch/print/rethrow
      // wrapper added nothing.
      template.template(f, "rawValues.ftl", FeatureTemplateTest.class);
    } finally {
      // Always release the underlying feature stream
      i.close();
    }
  }
Ejemplo n.º 5
0
  @Test
  public void testWithDateAndBoolean() throws Exception {

    // Verify that FeatureTemplate.description() handles date and boolean
    // attribute types without throwing.
    SimpleFeatureSource source = getFeatureSource(MockData.PRIMITIVEGEOFEATURE);
    SimpleFeatureCollection fc = source.getFeatures();
    SimpleFeatureIterator i = fc.features();
    try {
      // next() already returns SimpleFeature; no cast needed
      SimpleFeature f = i.next();

      FeatureTemplate template = new FeatureTemplate();
      try {
        template.description(f);
      } catch (Exception e) {
        e.printStackTrace();
        // Message fixed: the old text was copy-pasted from the null-value test
        fail("template threw exception on date/boolean values");
      }
    } finally {
      // Always release the underlying feature stream
      i.close();
    }
  }
Ejemplo n.º 6
0
  /**
   * Trains a cascade AdaBoost face-detection classifier on Spark using
   * Haar-like features over 30x30 integral images, then exports the model
   * to {@code CascadeAdaboost_model.txt}.
   */
  private static void trainCascadeAdaBoost() {
    int imgSize = 30;
    // Per-stage detection rate / false-alarm rate targets; overall FAR target
    double eachDR = 0.99, eachFAR = 0.5, finalFAR = 0;

    String root = "/home/hadoop/ProgramDatas/MLStudy/FaceDection/";
    // String root = "E:/TestDatas/MLStudy/FaceDection/";
    String dataFile = root + "train_data_2.txt";
    String modelFile = root + "CascadeAdaboost_model.txt";
    String misclassificationFile = root + "mis_classifications.txt";
    String sparkAppName = "Viola-Jones Train";
    String sparkMaster = "spark://localhost:7077";
    int sparkCores = 60;
    String sparkJars = "/home/hadoop/violajones.jar";

    checkMemoryInfo();

    System.out.println("initing feature templates...");
    List<FeatureTemplate> templates = FeatureTemplate.initFeaTemplates();

    System.out.println("initing features...");
    List<HaarLikeFeature> features = HaarLikeFeature.initFeatures(imgSize, imgSize, templates);

    System.out.println("loading train datas...");
    // Samples keyed by label: 1 = positive (face), 0 = negative (non-face)
    Map<Integer, List<IntegralImage>> trainDatas =
        FileUtils.loadTrainDatasSeparate(dataFile, imgSize, imgSize);
    List<IntegralImage> posDatas = trainDatas.get(1);
    List<IntegralImage> negDatas = trainDatas.get(0);

    SparkConf sparkConf =
        new SparkConf()
            .setMaster(sparkMaster)
            .setAppName(sparkAppName)
            .set("spark.executor.memory", "3g");
    JavaSparkContext sc = new JavaSparkContext(sparkConf);
    try {
      sc.addJar(sparkJars);
      sc.setLogLevel("WARN");

      System.out.println("training cascade adaboost...");
      CascadeAdaBoost cascade = new CascadeAdaBoost(posDatas, negDatas, features);
      CascadeClassifier classifier =
          cascade.train(
              sc, sparkCores, eachDR, eachFAR, finalFAR, modelFile, misclassificationFile);

      System.out.println("exporting model...");
      FileUtils.exportFile(modelFile, classifier.exportModel());
    } finally {
      // Ensure the Spark context is released even if training fails
      sc.close();
    }
  }
Ejemplo n.º 7
0
  /**
   * Trains a single-stage AdaBoost classifier of {@code T} weak learners on
   * Spark using Haar-like features over 24x24 integral images, then exports
   * the weighted features to {@code adaboost_model.txt}.
   */
  private static void trainAdaBoost() {
    int imgSize = 24;
    // Number of boosting rounds (weak classifiers to select)
    int T = 300;
    // String root = "/home/hadoop/ProgramDatas/MLStudy/FaceDection/";
    String root = "E:/TestDatas/MLStudy/FaceDection/";
    String dataFile = root + "train_data.txt";
    String modelFile = root + "adaboost_model.txt";
    String sparkAppName = "Viola-Jones Train";
    String sparkMaster = "spark://localhost:7077";
    int sparkCores = 60;
    String sparkJars = "/home/hadoop/violajones.jar";

    checkMemoryInfo();

    System.out.println("initing feature templates...");
    List<FeatureTemplate> templates = FeatureTemplate.initFeaTemplates();

    System.out.println("initing features...");
    List<HaarLikeFeature> features = HaarLikeFeature.initFeatures(imgSize, imgSize, templates);

    System.out.println("loading train datas...");
    List<IntegralImage> trainDatas = FileUtils.loadTrainDatas(dataFile, imgSize, imgSize);
    // Shuffle so positive/negative samples are interleaved for training
    Collections.shuffle(trainDatas);

    SparkConf sparkConf =
        new SparkConf()
            .setMaster(sparkMaster)
            .setAppName(sparkAppName)
            .set("spark.executor.memory", "2g");
    JavaSparkContext sc = new JavaSparkContext(sparkConf);
    try {
      sc.addJar(sparkJars);
      sc.setLogLevel("WARN");

      System.out.println("training adaboost...");
      AdaBoost adaBoost = new AdaBoost(trainDatas, features);
      Map<HaarLikeFeature, Double> classifiers = adaBoost.train(sc, sparkCores, T);

      System.out.println("exporting model...");
      List<String> model = new ArrayList<>();
      for (Entry<HaarLikeFeature, Double> item : classifiers.entrySet()) {
        model.add(item.getKey().toStringWithWeight(item.getValue()));
      }
      FileUtils.exportFile(modelFile, model);

      System.out.println("viola jones training success!");
    } finally {
      // Ensure the Spark context is released even if training fails
      sc.close();
    }
  }