public static void main(String[] args) throws Exception {
    // ToolRunner.run returns the exit code of FeatureMatching.run();
    // treat any nonzero code as failure.
    if (ToolRunner.run(new FeatureMatching(), args) != 0) {
        System.out.println(".......Feature Match failure........");
        System.exit(1);
    }
    System.exit(0);
}
public void setup(Context context) {
    try {
        // Load the OpenCV native library; java.library.path must point at
        // the directory containing it.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    } catch (UnsatisfiedLinkError e) {
        System.err.println("\nNATIVE LIBRARY failed to load...");
        System.err.println("ERROR: " + e);
        System.err.println("NATIVE_LIBRARY_NAME: " + Core.NATIVE_LIBRARY_NAME);
        System.err.println("java.library.path: " + System.getProperty("java.library.path"));
        System.exit(1);
    }
}
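// A minimal alternative-loading sketch, assuming the native library may not be
// on java.library.path inside a Hadoop task: try the default lookup first, then
// fall back to an absolute path. The fallback directory below is hypothetical;
// substitute wherever the OpenCV native library is installed on the nodes.
static {
    try {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    } catch (UnsatisfiedLinkError e) {
        // mapLibraryName turns e.g. "opencv_java300" into "libopencv_java300.so"
        // on Linux; "/usr/local/opencv/lib/" is an assumed install location.
        System.load("/usr/local/opencv/lib/" + System.mapLibraryName(Core.NATIVE_LIBRARY_NAME));
    }
}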
public void run() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    MyCvWindow cvWindow = new MyCvWindow("sample", 512, 512);
    String filepath = getClass().getResource("lena.jpg").getPath();
    Mat image;
    while (true) {
        image = Highgui.imread(filepath);
        cvWindow.showImage(image);
        Point point = cvWindow.touchedPoint();
        if (point != null) {
            System.out.println("Point(" + point.x + "," + point.y + ")");
        }
        int key = cvWindow.waitKey(40);
        if (key == MyCvWindow.KEY_ESC) {
            System.exit(0);
        }
    }
}
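// Note: Highgui.imread is the OpenCV 2.4 API. If this project is built against
// OpenCV 3.x (the opencv-300.jar mentioned above), image I/O moved to
// org.opencv.imgcodecs.Imgcodecs, e.g.:
//     Mat image = Imgcodecs.imread(filepath);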
// Transform a JSON-encoded feature into an OpenCV Mat
public static Mat json2mat(String json) {
    JsonParser parser = new JsonParser();
    JsonElement parseTree = parser.parse(json);

    // Verify the input is a JSON object
    if (!parseTree.isJsonObject()) {
        System.out.println("The input is not a JSON object...\nExiting...");
        System.exit(1);
    }
    JsonObject jobj = parseTree.getAsJsonObject();

    // Detect broken/null features
    if (jobj.get("rows") == null) {
        return null;
    }

    int rows = jobj.get("rows").getAsInt();
    int cols = jobj.get("cols").getAsInt();
    int type = jobj.get("type").getAsInt();
    String data = jobj.get("data").getAsString();

    // "data" holds comma-separated entries of the form "row col value"
    String[] pixs = data.split(",");
    Mat descriptor = new Mat(rows, cols, type);
    for (String pix : pixs) {
        String[] tmp = pix.split(" ");
        int r_pos = Integer.valueOf(tmp[0]);
        int c_pos = Integer.valueOf(tmp[1]);
        double rgb = Double.valueOf(tmp[2]);
        descriptor.put(r_pos, c_pos, rgb);
    }
    return descriptor;
}
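// A small usage sketch for json2mat, assuming the JSON layout implied above:
// "rows"/"cols"/"type" describe the Mat and "data" is a comma-separated list of
// "row col value" triples. The sample values are illustrative only, and the
// OpenCV native library must already be loaded before a Mat is created.
public static void json2matExample() {
    String json = "{\"rows\":2, \"cols\":2, \"type\":" + CvType.CV_64F
            + ", \"data\":\"0 0 1.0,0 1 2.0,1 0 3.0,1 1 4.0\"}";
    Mat m = json2mat(json);
    // dump() prints the matrix contents, e.g. [1, 2; 3, 4]
    System.out.println(m.dump());
}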
public int run(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
    if (args.length != 2) {
        System.out.println("Usage: FeatureMatching ID <inputName.jpeg/inputName.jpg>");
        System.exit(1);
    }
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss", Locale.US);
    String time = sdf.format(new Date());

    Job job = Job.getInstance();
    ID = "/" + args[0];
    String filename = args[1].toLowerCase();
    System.out.println("current filename: " + filename);

    // Detect an illegal username (the user's directory must already exist)
    File userPath = new File(LOCAL_USER_DIR + ID);
    if (!userPath.exists()) {
        System.out.println("Unauthorized username!!!\nExiting......");
        System.exit(1);
    }

    // Check the file type before doing any work; only jpeg/jpg images are supported
    String type = filename.substring(filename.lastIndexOf("."));
    if (!(type.equals(".jpg") || type.equals(".jpeg"))) {
        System.out.println("Image type not supported!!!\nExiting...");
        System.exit(1);
    }

    // Extract features from the query image in the local dir
    // /local.../user/ID/input/image.jpg and save them to HDFS under
    // hdfs://.../user/ID/input/
    extractQueryFeatures2HDFS(filename, job);

    // Pass the HDFS path of the query's feature file to the tasks
    String featureFileName = filename.substring(0, filename.lastIndexOf(".")) + ".json";
    job.getConfiguration()
            .set("featureFilePath", HDFS_HOME + USER + ID + INPUT + "/" + featureFileName);

    // Input: hdfs://.../features/ -- the directory holding every feature
    // extracted from the image database
    String inputPathStr = HDFS_HOME + FEATURES;
    // Output: hdfs://.../user/ID/output/<timestamp>
    String outputPathStr = HDFS_HOME + USER + ID + OUTPUT + "/" + time;

    job.setInputFormatClass(KeyValueTextInputFormat.class);

    // Build a comma-separated list of all feature files: /.../features/data/part-*
    FileSystem fs = FileSystem.get(job.getConfiguration());
    FileStatus[] statuses = fs.listStatus(new Path(inputPathStr));
    StringBuilder sb = new StringBuilder();
    for (FileStatus fileStatus : statuses) {
        sb.append(fileStatus.getPath()).append(",");
    }
    sb.deleteCharAt(sb.lastIndexOf(","));

    job.setJarByClass(FeatureMatching.class);
    job.setMapperClass(FeatureMatchMapper.class);
    job.setReducerClass(FeatureMatchReducer.class);
    // Only one reducer is needed to collect the result
    job.setNumReduceTasks(1);
    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(Text.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);

    // The input is a directory tree, so recurse into subdirectories
    FileInputFormat.setInputDirRecursive(job, true);
    // A PathFilter could skip _SUCCESS files here; PathFilter is an interface,
    // so it must be implemented rather than extended (see the MyPathFilter
    // sketch below):
    // FileInputFormat.setInputPathFilter(job, MyPathFilter.class);
    FileInputFormat.setInputPaths(job, sb.toString());
    FileOutputFormat.setOutputPath(job, new Path(outputPathStr));

    boolean success = job.waitForCompletion(true);
    return success ? 0 : 1;
}
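// A sketch of the _SUCCESS filter mentioned above. org.apache.hadoop.fs.PathFilter
// is an interface, so the fix is to implement it and register the class with
// FileInputFormat.setInputPathFilter(job, MyPathFilter.class). The name
// MyPathFilter is taken from the commented-out call above.
public static class MyPathFilter implements PathFilter {
    @Override
    public boolean accept(Path path) {
        // Skip Hadoop job markers (_SUCCESS, _logs) and hidden files;
        // accept everything else, including directories, so recursion still works.
        String name = path.getName();
        return !name.startsWith("_") && !name.startsWith(".");
    }
}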