public void reduce(Text key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    String keyS = key.toString();
    if (keyS.startsWith("O") || keyS.startsWith("P") || keyS.startsWith("S")) {
        // Concatenate all attribute values that arrived under this order/part/supplier key.
        StringBuilder sum = new StringBuilder();
        for (Text val : values) {
            sum.append(" ").append(val.toString());
        }
        result.set(sum.toString());
        context.write(key, result);
    }
    if (keyS.startsWith("L")) {
        // Lineitem keys carry the join columns in the key itself; emit a blank value.
        result.set(" ");
        context.write(key, result);
    }
}
public void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
    // The grandparent directory of this split identifies which input set the record came from.
    String curFile = ((FileSplit) context.getInputSplit()).getPath().getParent().getParent().getName();
    String trainFile = context.getConfiguration().get("train_file");
    StringTokenizer st = new StringTokenizer(value.toString());
    String word = st.nextToken();
    String fId = st.nextToken();
    if (curFile.equals(trainFile)) {
        // Training record: emit word -> feature id.
        myKey.set(word);
        myVal.set(fId);
        context.write(myKey, myVal);
    } else {
        // tf-idf record: repack the alternating (filename, tf-idf) tokens, separated by dlt.
        StringBuilder builder = new StringBuilder(dlt);
        while (st.hasMoreTokens()) {
            String filename = st.nextToken();
            String tfIdf = st.nextToken();
            builder.append(filename).append(dlt).append(tfIdf).append("\t");
        }
        myKey.set(word);
        myVal.set(builder.toString());
        context.write(myKey, myVal);
    }
}
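// The mapper above compares the split's grandparent directory name against a
// "train_file" value read from the job configuration. Below is a minimal sketch of
// how that value might be supplied on the driver side; the class name, job name,
// and path layout are illustrative assumptions, not from the original source.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class TrainFileConfigDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Must match the key read via context.getConfiguration().get("train_file").
        conf.set("train_file", "training");  // assumed directory name
        Job job = Job.getInstance(conf, "tfidf join");
        // For an input split at /data/training/part-00000/chunk.txt, the mapper's
        // getPath().getParent().getParent().getName() call resolves to "training".
    }
}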
public void map(Object key, Text value, Context context)
        throws IOException, InterruptedException {
    String file = value.toString();
    String[] lines = file.split("\n");
    for (String line : lines) {
        // Extract the author name between <author> and </author>. Locate the opening
        // tag with indexOf rather than a fixed offset of 8, so lines where the tag
        // is not at position 0 are still handled correctly.
        int start = line.indexOf("<author>");
        if (start < 0) {
            continue;
        }
        start += "<author>".length();
        int end = line.indexOf("</author>", start);
        String author = (end >= 0) ? line.substring(start, end) : line.substring(start);
        word.set(author);
        context.write(word, one);
    }
}
public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
    // Count the number of values for this key; the IntWritable payloads are ignored.
    int sum = 0;
    for (IntWritable val : values) {
        sum++;
    }
    // The key is discarded: only the count is emitted.
    context.write(new IntWritable(sum), NullWritable.get());
}
public void reduce(Text key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {
    // Accumulate in a long rather than an int to avoid overflow when summing
    // LongWritable values.
    long sum = 0;
    for (LongWritable val : values) {
        sum += val.get();
    }
    result.set(sum);
    context.write(key, result);
}
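// The map and reduce methods in this section live inside Mapper/Reducer subclasses
// that a driver wires into a Job. Below is a minimal driver sketch for a counting
// job with Text/LongWritable output matching the reducer above; CountMapper and
// CountReducer are hypothetical class names standing in for the real ones.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class CountDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "count");
        job.setJarByClass(CountDriver.class);
        job.setMapperClass(CountMapper.class);      // hypothetical Mapper subclass
        job.setCombinerClass(CountReducer.class);   // safe here because the reduce is a pure sum
        job.setReducerClass(CountReducer.class);    // hypothetical Reducer subclass
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}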
@Override
public void map(Object key, Text value, Context context)
        throws IOException, InterruptedException {
    String line = value.toString();
    StringTokenizer tokenizer = new StringTokenizer(line);
    // The first token is the movie id; parsing it also validates the record.
    int movieId = Integer.parseInt(tokenizer.nextToken());
    // Emit one count per remaining token under a single shared key, so the
    // reducer produces the total token count.
    while (tokenizer.hasMoreTokens()) {
        tokenizer.nextToken();
        context.write(new Text("1"), new IntWritable(1));
    }
}
public void reduce(IntWritable key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    System.out.println(PREFIX + "Collecting all the matched results");
    for (Text val : values) {
        // Split on a literal '|'. The escaping matters: split() takes a regex,
        // and an unescaped vertical bar is the regex alternation operator.
        String[] tmp = val.toString().split("\\|");
        System.out.println("filename:" + tmp[0] + " ratio:" + tmp[1]);
        String filename = tmp[0];
        double ratio = Double.parseDouble(tmp[1]);
        // Key: filename  Value: ratio
        context.write(new Text(filename), new DoubleWritable(ratio));
    }
}
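// A short standalone demo of why the reducer above must escape the vertical bar:
// with an unescaped "|" the regex matches the empty string at every position, so
// the record is split into single characters. The record value is illustrative.
import java.util.Arrays;

public class SplitDemo {
    public static void main(String[] args) {
        String record = "img_001.jpg|0.42";
        // Unescaped '|': empty-string alternation, splits between every character.
        System.out.println(Arrays.toString(record.split("|")));
        // -> [i, m, g, _, 0, 0, 1, ., j, p, g, |, 0, ., 4, 2]
        // Escaped '\\|': matches one literal vertical bar.
        System.out.println(Arrays.toString(record.split("\\|")));
        // -> [img_001.jpg, 0.42]
    }
}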
public void reduce(Text key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    String[] pair = new String[2];
    int count = 0;
    for (Text txt : values) {
        // Guard against more than two values so the fixed-size array cannot overflow.
        if (count >= 2) {
            break;
        }
        pair[count] = txt.toString();
        count++;
    }
    // The word exists in the training data only if both sides of the join are present.
    if (count == 2) {
        StringTokenizer stOne, stTwo;
        // The value containing dlt is the repacked (filename, tf-idf) list;
        // the other is the feature id from the training file.
        if (pair[0].contains(dlt)) {
            stOne = new StringTokenizer(pair[1]);
            stTwo = new StringTokenizer(pair[0]);
        } else {
            stOne = new StringTokenizer(pair[0]);
            stTwo = new StringTokenizer(pair[1]);
        }
        // Emit the joined record: feature id followed by the (filename, tf-idf) pairs.
        String fId = stOne.nextToken();
        StringBuilder builder = new StringBuilder(dlt);
        builder.append(fId).append(dlt);
        while (stTwo.hasMoreTokens()) {
            String filename = stTwo.nextToken();
            String tfIdf = stTwo.nextToken();
            builder.append(filename).append(dlt).append(tfIdf).append("\t");
        }
        myVal.set(builder.toString());
        context.write(key, myVal);
    }
}
public void map(Text key, Text value, Context context)
        throws InterruptedException, IOException {
    String filename = key.toString();
    String json = value.toString();
    // Make sure the input is valid
    if (!(filename.isEmpty() || json.isEmpty())) {
        // Convert the JSON-encoded feature into an OpenCV Mat
        Mat descriptor = json2mat(json);
        if (descriptor != null) {
            // Read the query feature from the file named in the Hadoop configuration
            String pathStr = context.getConfiguration().get("featureFilePath");
            FileSystem fs = FileSystem.get(context.getConfiguration());
            FSDataInputStream fsDataInputStream = fs.open(new Path(pathStr));
            StringBuilder sb = new StringBuilder();
            // Read the query feature through a fixed-size buffer. Note: the offset
            // argument of read() indexes into the buffer, not into the stream, so it
            // must be 0 for a fresh buffer on every iteration.
            int remain = fsDataInputStream.available();
            while (remain > 0) {
                byte[] buf = new byte[BUF_SIZE];
                int read = fsDataInputStream.read(buf, 0, BUF_SIZE);
                sb.append(new String(buf, 0, read, StandardCharsets.UTF_8));
                remain -= read;
                System.out.println("remain:" + remain + "\tread:" + read + "\tsb.size:" + sb.length());
            }
            String query_json = sb.toString();
            fsDataInputStream.close();
            Mat query_features = json2mat(query_json);

            // Match the current database image against the query image
            DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
            MatOfDMatch matches = new MatOfDMatch();

            // Both features must have the same number of columns
            // (the extracted features are all 128 columns wide, at least in this case)
            if (query_features.cols() == descriptor.cols()) {
                matcher.match(query_features, descriptor, matches);
                DMatch[] dMatches = matches.toArray();

                // Find the max/min distances over all matches
                double max_dist = 0;
                double min_dist = 100;
                for (DMatch dMatch : dMatches) {
                    double dist = dMatch.distance;
                    if (min_dist > dist) min_dist = dist;
                    if (max_dist < dist) max_dist = dist;
                }

                // Only matches with distance <= threshold count as good matches
                double threshold = max_dist * THRESHOLD_FACTOR;
                LinkedList<DMatch> goodMatches = new LinkedList<>();
                for (DMatch dMatch : dMatches) {
                    if (dMatch.distance <= threshold) {
                        goodMatches.addLast(dMatch);
                    }
                }

                // The ratio of good matches to all matches measures similarity
                double ratio = (double) goodMatches.size() / (double) dMatches.length;

                System.out.println("*** current_record_filename:" + filename + " ***");
                System.out.println("feature:" + descriptor + "\nquery_feature:" + query_features);
                System.out.println("min_dist of keypoints:" + min_dist
                        + " max_dist of keypoints:" + max_dist);
                System.out.println("total_matches:" + dMatches.length
                        + "\tgood_matches:" + goodMatches.size());
                System.out.println();

                if (ratio > PERCENTAGE_THRESHOLD) {
                    // Key: 1  Value: filename|ratio
                    context.write(ONE, new Text(filename + "|" + ratio));
                }
            } else {
                System.out.println("The sizes of the features are not equal");
            }
        } else {
            // A null descriptor: skip this record
            System.out.println("A broken/null feature:" + filename);
            System.out.println();
        }
    }
}
public void map(Object key, Text value, Context context)
        throws IOException, InterruptedException {
    String line = value.toString();
    String[] attributes = line.split("[|]");
    String tableName = getTableName(line);
    if (tableName.equalsIgnoreCase("lineitem")) {
        // Composite join key: orderkey + partkey + suppkey
        word.set("LO" + attributes[0] + "+P" + attributes[1] + "+S" + attributes[2]);
        context.write(word, new Text(" "));
    } else if (tableName.equalsIgnoreCase("supplier")) {
        if (line.contains(k2)) {
            // Replicate the supplier attribute under four sub-keys (A-D)
            Text v = new Text(attributes[6]);
            word.set("S" + attributes[0] + "A");
            context.write(word, v);
            word.set("S" + attributes[0] + "B");
            context.write(word, v);
            word.set("S" + attributes[0] + "C");
            context.write(word, v);
            word.set("S" + attributes[0] + "D");
            context.write(word, v);
        }
    } else if (tableName.equalsIgnoreCase("part")) {
        if (line.contains(k1)) {
            Text v = new Text(attributes[1] + " " + attributes[4] + " "
                    + attributes[6] + " " + attributes[8]);
            word.set("P" + attributes[0] + "A");
            context.write(word, v);
            word.set("P" + attributes[0] + "B");
            context.write(word, v);
            word.set("P" + attributes[0] + "C");
            context.write(word, v);
            word.set("P" + attributes[0] + "D");
            context.write(word, v);
        }
    } else if (tableName.equalsIgnoreCase("order")) {
        if (line.contains(k0)) {
            Text v = new Text(attributes[8]);
            word.set("O" + attributes[0] + "A");
            context.write(word, v);
            word.set("O" + attributes[0] + "B");
            context.write(word, v);
            word.set("O" + attributes[0] + "C");
            context.write(word, v);
            word.set("O" + attributes[0] + "D");
            context.write(word, v);
        }
    }
}
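// getTableName is referenced above but not defined in this section. One possible
// sketch, assuming TPC-H-style pipe-delimited rows recognized by field count
// (lineitem has 16 fields, supplier 7; part and orders both have 9, so this guesses
// between them by whether the second field is numeric, o_custkey vs p_name). The
// real implementation may instead use the input file name; this is an assumption.
private static String getTableName(String line) {
    String[] fields = line.split("[|]");
    switch (fields.length) {
        case 16:
            return "lineitem";
        case 7:
            return "supplier";
        case 9:
            return fields[1].matches("\\d+") ? "order" : "part";
        default:
            return "unknown";
    }
}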