/**
 * Extracts dense features across the training set, then finds clusters (the visual words)
 * within those features.
 */
private AssignCluster<double[]> computeClusters() {
    System.out.println("Image Features");

    // compute features across the training image set
    features.reset();
    for (String scene : train.keySet()) {
        List<String> imagePaths = train.get(scene);
        System.out.println(" " + scene);

        for (String path : imagePaths) {
            ImageUInt8 image = UtilImageIO.loadImage(path, ImageUInt8.class);
            describeImage.process(image, features, null);
        }
    }

    // add the features to the overall list which the clusters will be computed from
    for (int i = 0; i < features.size; i++) {
        cluster.addReference(features.get(i));
    }

    System.out.println("Clustering");
    // find the clusters. This can take a while
    cluster.process(NUMBER_OF_WORDS);

    // save the cluster assignment so this expensive step can be skipped on future runs
    UtilIO.save(cluster.getAssignment(), CLUSTER_FILE_NAME);

    return cluster.getAssignment();
}
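/**
 * Hedged sketch, not part of the original example: since computeClusters() saves its result with
 * UtilIO.save(), a later run could reload the assignment instead of repeating the slow clustering
 * step. This assumes UtilIO.load() deserializes the object saved above; the helper name
 * loadOrComputeClusters() is made up for this illustration and requires java.io.File to be
 * imported. The returned AssignCluster maps a raw feature descriptor to its visual-word index.
 */
private AssignCluster<double[]> loadOrComputeClusters() {
    if (new File(CLUSTER_FILE_NAME).exists()) {
        // reload the assignment that computeClusters() previously saved
        AssignCluster<double[]> assignment = UtilIO.load(CLUSTER_FILE_NAME);
        return assignment;
    }
    return computeClusters();
}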
public ExampleClassifySceneKnn(final DescribeImageDense<ImageUInt8, TupleDesc_F64> describeImage,
                               ComputeClusters<double[]> clusterer,
                               NearestNeighbor<HistogramScene> nn) {
    this.describeImage = describeImage;
    this.cluster = new ClusterVisualWords(clusterer, describeImage.createDescription().size(), 0xFEEDBEEF);
    this.nn = nn;

    // This list can be dynamically grown. However, TupleDesc doesn't have a no-argument constructor,
    // so you must tell the queue how to construct the data
    features = new FastQueue<TupleDesc_F64>(TupleDesc_F64.class, true) {
        @Override
        protected TupleDesc_F64 createInstance() {
            return describeImage.createDescription();
        }
    };
}
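/**
 * Hedged illustration, not in the original example: the FastQueue pattern above in isolation.
 * Because the queue is constructed with declareInstances = true, grow() either recycles a
 * previously allocated descriptor or calls the createInstance() override to allocate a new one,
 * which is why the anonymous subclass is needed. The descriptor length of 64 and the method
 * name sketchFeatureQueue() are arbitrary choices made only for this sketch.
 */
private static FastQueue<TupleDesc_F64> sketchFeatureQueue() {
    FastQueue<TupleDesc_F64> queue = new FastQueue<TupleDesc_F64>(TupleDesc_F64.class, true) {
        @Override
        protected TupleDesc_F64 createInstance() {
            return new TupleDesc_F64(64); // arbitrary length, used only for this sketch
        }
    };
    queue.grow();  // returns a recycled or newly created TupleDesc_F64
    queue.reset(); // size goes back to 0, but the allocated descriptors are kept for reuse
    return queue;
}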
/**
 * For all the images in the training data set it computes a {@link HistogramScene}. That data
 * structure contains the word histogram and the scene that the histogram belongs to.
 */
private List<HistogramScene> computeHistograms(FeatureToWordHistogram_F64 featuresToHistogram) {
    List<String> scenes = getScenes();

    // Processed results which will be passed into the k-NN algorithm
    List<HistogramScene> memory = new ArrayList<HistogramScene>();

    for (int sceneIndex = 0; sceneIndex < scenes.size(); sceneIndex++) {
        String scene = scenes.get(sceneIndex);
        System.out.println(" " + scene);
        List<String> imagePaths = train.get(scene);

        for (String path : imagePaths) {
            ImageUInt8 image = UtilImageIO.loadImage(path, ImageUInt8.class);

            // reset before processing a new image
            featuresToHistogram.reset();
            features.reset();

            describeImage.process(image, features, null);
            for (int i = 0; i < features.size; i++) {
                featuresToHistogram.addFeature(features.get(i));
            }
            featuresToHistogram.process();

            // The histogram is already normalized so that it sums up to 1. This provides invariance
            // against the overall number of features changing.
            double[] histogram = featuresToHistogram.getHistogram();

            // Create the data structure used by the KNN classifier
            HistogramScene imageHist = new HistogramScene(NUMBER_OF_WORDS);
            imageHist.setHistogram(histogram);
            imageHist.type = sceneIndex;

            memory.add(imageHist);
        }
    }
    return memory;
}
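/**
 * Hedged sketch, not part of the original example, of how the histograms returned by
 * computeHistograms() could be handed to the nearest-neighbor structure. The init(),
 * setPoints(), and findNearest() calls assume the ddogleg NearestNeighbor API of this
 * BoofCV generation, and the direct access to HistogramScene.histogram assumes that field
 * is public; treat both as assumptions, not a definitive implementation.
 */
private void sketchTrainNearestNeighbor(List<HistogramScene> memory) {
    List<double[]> points = new ArrayList<double[]>();
    for (HistogramScene h : memory) {
        points.add(h.histogram); // each normalized word histogram becomes a k-NN point
    }

    nn.init(NUMBER_OF_WORDS);     // dimension of each histogram
    nn.setPoints(points, memory); // associate every point with its HistogramScene

    // At classification time a query histogram could be looked up roughly like this
    // (hypothetical variable names, not from the original):
    // NnData<HistogramScene> result = new NnData<HistogramScene>();
    // if (nn.findNearest(queryHistogram, -1, result)) {
    //     int predictedScene = result.data.type;
    // }
}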