// Transform the json-type feature to mat-type
public static Mat json2mat(String json) {
    JsonParser parser = new JsonParser();
    JsonElement parseTree = parser.parse(json);

    // Verify the input is JSON type
    if (!parseTree.isJsonObject()) {
        System.out.println("The input is not a JSON type...\nExiting...");
        System.exit(1);
    }
    JsonObject jobj = parseTree.getAsJsonObject();
    if (jobj == null || !jobj.isJsonObject() || jobj.isJsonNull()) {
        return null;
    }

    // Detect broken/null features
    JsonElement r = jobj.get("rows");
    if (r == null) {
        return null;
    }

    int rows = jobj.get("rows").getAsInt();
    int cols = jobj.get("cols").getAsInt();
    int type = jobj.get("type").getAsInt();
    String data = jobj.get("data").getAsString();

    // The data string holds one "row col value" triple per comma-separated entry
    String[] pixs = data.split(",");
    Mat descriptor = new Mat(rows, cols, type);
    for (String pix : pixs) {
        String[] tmp = pix.split(" ");
        int r_pos = Integer.valueOf(tmp[0]);
        int c_pos = Integer.valueOf(tmp[1]);
        double rgb = Double.valueOf(tmp[2]);
        descriptor.put(r_pos, c_pos, rgb);
    }
    return descriptor;
}
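// For reference, a minimal sketch of the serializer that could have produced the
// JSON consumed above. This is an illustrative assumption, not the project's
// actual FeatureExtraction.mat2json: it assumes the same rows/cols/type/data
// layout and the comma-separated "row col value" encoding that json2mat parses,
// and it only serializes the first channel of each element.
public static String mat2jsonSketch(Mat mat) {
    JsonObject jobj = new JsonObject();
    jobj.addProperty("rows", mat.rows());
    jobj.addProperty("cols", mat.cols());
    jobj.addProperty("type", mat.type());

    StringBuilder data = new StringBuilder();
    for (int r = 0; r < mat.rows(); r++) {
        for (int c = 0; c < mat.cols(); c++) {
            if (data.length() > 0) {
                data.append(",");
            }
            // Mat.get(row, col) returns one double per channel; keep channel 0
            data.append(r).append(" ").append(c).append(" ").append(mat.get(r, c)[0]);
        }
    }
    jobj.addProperty("data", data.toString());
    return jobj.toString();
}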
public static void extractQueryFeatures2HDFS(String filename, Job job) throws IOException {
    // Read the local image.jpg as a Mat
    Mat query_mat_float = Highgui.imread(LOCAL_USER_DIR + ID + INPUT + "/" + filename, CvType.CV_32FC3);
    // Convert RGB to GRAY
    Mat query_gray = new Mat();
    Imgproc.cvtColor(query_mat_float, query_gray, Imgproc.COLOR_RGB2GRAY);
    // Convert the float type to unsigned integer (required by SIFT)
    Mat query_mat_byte = new Mat();
    query_gray.convertTo(query_mat_byte, CvType.CV_8UC3);
    // // Resize the image to 1/FACTOR in both width and height
    // Mat query_mat_byte = FeatureExtraction.resize(query_mat_byte);

    // Extract the feature from the (Mat) image
    Mat query_features = FeatureExtraction.extractFeature(query_mat_byte);
    System.out.println(PREFIX + "Extracting the query image feature...");
    System.out.println("query_mat(float,color):" + query_mat_float);
    System.out.println("query_mat(float,gray):" + query_gray);
    System.out.println("query_mat(byte,gray):" + query_mat_byte);
    System.out.println("query_mat_features:" + query_features);
    System.out.println();

    // Store the feature in HDFS so it can be reused later by different map tasks
    System.out.println(PREFIX + "Generating the feature file for the query image in HDFS...");
    FileSystem fs = FileSystem.get(job.getConfiguration());
    String featureFileName = filename.substring(0, filename.lastIndexOf(".")) + ".json";
    FSDataOutputStream fsDataOutputStream =
            fs.create(new Path(HDFS_HOME + USER + ID + INPUT + "/" + featureFileName));
    BufferedWriter bw =
            new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
    bw.write(FeatureExtraction.mat2json(query_features));
    bw.close();
    System.out.println(PREFIX + "Query feature extraction finished...");
    System.out.println();
}
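// A driver-side sketch (an assumption, not the project's actual job setup) showing
// how the feature file written above could be handed to the map tasks, which read
// it back via context.getConfiguration().get("featureFilePath") in map() below.
// The helper name configureQueryFeature is hypothetical.
public static void configureQueryFeature(Job job, String filename) throws IOException {
    extractQueryFeatures2HDFS(filename, job);
    String featureFileName = filename.substring(0, filename.lastIndexOf(".")) + ".json";
    job.getConfiguration().set("featureFilePath",
            HDFS_HOME + USER + ID + INPUT + "/" + featureFileName);
}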
public void map(Text key, Text value, Context context) throws InterruptedException, IOException {
    String filename = key.toString();
    String json = value.toString();

    // Make sure the input is valid
    if (!(filename.isEmpty() || json.isEmpty())) {
        // Change the json-type feature to a Mat-type feature
        Mat descriptor = json2mat(json);
        if (descriptor != null) {
            // Read the query feature from the cache in Hadoop
            Mat query_features;
            String pathStr = context.getConfiguration().get("featureFilePath");
            FileSystem fs = FileSystem.get(context.getConfiguration());
            FSDataInputStream fsDataInputStream = fs.open(new Path(pathStr));
            StringBuilder sb = new StringBuilder();

            // Use a buffer to read the query feature
            int remain = fsDataInputStream.available();
            while (remain > 0) {
                byte[] buf = new byte[BUF_SIZE];
                // Read into the start of the fresh buffer each iteration
                int read = fsDataInputStream.read(buf, 0, BUF_SIZE);
                if (read < 0) {
                    break;
                }
                sb.append(new String(buf, 0, read, StandardCharsets.UTF_8));
                remain = remain - read;
                System.out.println("remain:" + remain + "\tread:" + read + "\tsb.size:" + sb.length());
            }

            // Read the query feature line by line
            // Scanner sc = new Scanner(fsDataInputStream, "UTF-8");
            // StringBuilder sb = new StringBuilder();
            // while (sc.hasNextLine()) {
            //     sb.append(sc.nextLine());
            // }
            // String query_json = sb.toString();
            // String query_json = new String(buf, StandardCharsets.UTF_8);

            String query_json = sb.toString();
            fsDataInputStream.close();
            query_features = json2mat(query_json);

            // Get the similarity of the current database image against the query image
            DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
            MatOfDMatch matches = new MatOfDMatch();

            // Ensure the two features have the same number of cols
            // (the features extracted here are all 128 cols, at least in this case)
            if (query_features.cols() == descriptor.cols()) {
                matcher.match(query_features, descriptor, matches);
                DMatch[] dMatches = matches.toArray();

                // Calculate the max/min distances
                // double max_dist = Double.MAX_VALUE;
                // double min_dist = Double.MIN_VALUE;
                double max_dist = 0;
                double min_dist = 100;
                for (int i = 0; i < dMatches.length; i++) {
                    double dist = dMatches[i].distance;
                    if (min_dist > dist) min_dist = dist;
                    if (max_dist < dist) max_dist = dist;
                }

                // Only distances <= threshold count as good matches
                double threshold = max_dist * THRESHOLD_FACTOR;
                // double threshold = min_dist * 2;
                LinkedList<DMatch> goodMatches = new LinkedList<DMatch>();
                for (int i = 0; i < dMatches.length; i++) {
                    if (dMatches[i].distance <= threshold) {
                        goodMatches.addLast(dMatches[i]);
                    }
                }

                // Get the ratio of good_matches to all_matches
                double ratio = (double) goodMatches.size() / (double) dMatches.length;

                System.out.println("*** current_record_filename:" + filename + " ***");
                System.out.println("feature:" + descriptor + "\nquery_feature:" + query_features);
                System.out.println("min_dist of keypoints:" + min_dist + " max_dist of keypoints:" + max_dist);
                System.out.println("total_matches:" + dMatches.length + "\tgood_matches:" + goodMatches.size());
                // System.out.println("type:" + descriptor.type() + " channels:" + descriptor.channels()
                //         + " rows:" + descriptor.rows() + " cols:" + descriptor.cols());
                // System.out.println("qtype:" + query_features.type() + " qchannels:" + query_features.channels()
                //         + " qrows:" + query_features.rows() + " qcols:" + query_features.cols());
                System.out.println();

                if (ratio > PERCENTAGE_THRESHOLD) {
                    // Key:1  Value:filename|ratio
                    context.write(ONE, new Text(filename + "|" + ratio));
                    // context.write(ONE, new Text(filename + "|" + String.valueOf(goodMatches.size())));
                }
            } else {
                System.out.println("The sizes of the features are not equal");
            }
        } else {
            // A null pointer: do nothing
            System.out.println("A broken/null feature:" + filename);
            System.out.println();
        }
    }
}