/**
 * Test connecting three clients to one server.
 *
 * @throws IOException
 */
@Test
public void connectThreeClientsToOneServer() throws IOException {
  @SuppressWarnings("rawtypes")
  Context context = mock(Context.class);
  when(context.getConfiguration()).thenReturn(conf);
  ServerData<IntWritable, IntWritable, IntWritable, IntWritable> serverData =
      MockUtils.createNewServerData(conf, context);
  WorkerInfo workerInfo = new WorkerInfo();
  NettyServer server =
      new NettyServer(conf, new WorkerRequestServerHandler.Factory(serverData), workerInfo, context);
  server.start();
  workerInfo.setInetSocketAddress(server.getMyAddress());
  List<WorkerInfo> addresses = Lists.<WorkerInfo>newArrayList(workerInfo);
  NettyClient client1 = new NettyClient(context, conf, new WorkerInfo());
  client1.connectAllAddresses(addresses);
  NettyClient client2 = new NettyClient(context, conf, new WorkerInfo());
  client2.connectAllAddresses(addresses);
  NettyClient client3 = new NettyClient(context, conf, new WorkerInfo());
  client3.connectAllAddresses(addresses);
  client1.stop();
  client2.stop();
  client3.stop();
  server.stop();
}
@Override protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException { /* * Added by Xudong Zhang */ if (parseWeibo) { String word = key.toString(); if (!ChineseUtils.allChinese(word)) { context.getCounter("MyCounter", "NumWordsFilteredByWeiboParser").increment(1); return; } } long sum = 0; for (LongWritable value : values) { sum += value.get(); } if (sum >= minSupport) { context.write(key, new LongWritable(sum)); } else { context.getCounter("MyCounter", "NumWordsLessThanMinSupport").increment(1); } }
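// A hypothetical sketch of the ChineseUtils.allChinese helper used by the reducer above (an
// assumption; the real implementation is not shown here). It treats a word as "all Chinese"
// only if every code point falls in the CJK Unified Ideographs block.
public final class ChineseUtils {

  private ChineseUtils() {}

  public static boolean allChinese(String word) {
    if (word == null || word.isEmpty()) {
      return false;
    }
    for (int i = 0; i < word.length(); ) {
      int codePoint = word.codePointAt(i);
      if (Character.UnicodeBlock.of(codePoint) != Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS) {
        return false;
      }
      i += Character.charCount(codePoint);
    }
    return true;
  }
}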
@Override protected void setup(Context context) throws IOException, InterruptedException { super.setup(context); minSupport = context .getConfiguration() .getInt(DictionaryVectorizer.MIN_SUPPORT, DictionaryVectorizer.DEFAULT_MIN_SUPPORT); parseWeibo = context.getConfiguration().getBoolean(DictionaryVectorizer.PARSE_WEIBO, false); }
@Override protected void cleanup(Context context) throws IOException, InterruptedException { for (Entry<String, ArrayList<Long>> entry : args.entrySet()) { context.write( new Text(entry.getKey()), new Text( entry.getValue().get(0) + " \t " + entry.getValue().get(1) + "\t" + entry.getValue().size())); } context.write(new Text("0"), new Text(String.valueOf(usrCount))); }
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  InputSplit split = context.getInputSplit();
  System.out.println("***************Mapper's setup is being executed***************");
  FileSplit fileSplit = (FileSplit) split;
  long dataStart = fileSplit.getStart();
  System.out.println("***************getStart() returns " + dataStart + " ***************");
  long dataLength = fileSplit.getLength();
  System.out.println("***************getLength() returns " + dataLength + " ***************");
  String[] dataLocations = fileSplit.getLocations();
  System.out.println(
      "***************getLocations() returns " + dataLocations.length + " locations***************");
  for (int i = 0; i < dataLocations.length; i++) {
    System.out.println(
        "***************No." + i + " location is : " + dataLocations[i] + " ***************");
  }
  Path path = fileSplit.getPath();
  System.out.println(
      "***************getPath() returns " + path.toString() + " ***************");
}
@Override
public void map(Object key, Text value, Context context)
    throws IOException, InterruptedException {
  String line = value.toString();
  StringTokenizer tokenizer = new StringTokenizer(line, ": ");
  boolean firstPageHandled = false;
  while (tokenizer.hasMoreTokens()) {
    String pageNum = tokenizer.nextToken().trim().toLowerCase();
    if (pageNum.isEmpty()) {
      continue;
    }
    int outKey = Integer.parseInt(pageNum);
    // Emit 0 for the first token on the line (the page itself) and 1 for every later token.
    int numLinks = 1;
    if (!firstPageHandled) {
      numLinks = 0;
      firstPageHandled = true;
    }
    context.write(new IntWritable(outKey), new IntWritable(numLinks));
  }
}
public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException { int i = 0; for (IntWritable value : values) { i++; } context.write(key, new IntWritable(i)); }
public void reduce(Text key, Iterable<IntWritable> values, Context output)
    throws IOException, InterruptedException {
  int sum = 0;
  // Iterate with a single for-each pass; calling values.iterator() on every loop iteration
  // restarts the iteration and produces a wrong sum.
  for (IntWritable value : values) {
    sum += value.get();
  }
  output.write(key, new IntWritable(sum));
}
@Override protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { String values[] = StringUtils.split(value.toString(), "\t"); for (String word : values) { context.write(new Text(word), new IntWritable(1)); } }
@Override protected void cleanup(Context context) throws IOException, InterruptedException { for (Pair<Integer, Integer> item : linksMap) { Integer[] items = {item.second, item.first}; IntArrayWritable val = new IntArrayWritable(items); context.write(NullWritable.get(), val); } }
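// A hypothetical sketch of the IntArrayWritable type written out in the cleanup() above (an
// assumption; the original class is not shown). It follows the common Hadoop idiom of
// subclassing ArrayWritable with a fixed value class, so that toArray() in the reducer can be
// cast to IntWritable[].
public static class IntArrayWritable extends ArrayWritable {

  public IntArrayWritable() {
    super(IntWritable.class); // no-arg constructor required for deserialization
  }

  public IntArrayWritable(Integer[] ints) {
    super(IntWritable.class);
    IntWritable[] values = new IntWritable[ints.length];
    for (int i = 0; i < ints.length; i++) {
      values[i] = new IntWritable(ints[i]);
    }
    set(values);
  }
}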
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { String line = value.toString(); String[] arr = line.split(","); word.set(arr[0]); context.write(word, one); }
@Override
protected void map(LongWritable key, Text value, Context context)
    throws IOException, InterruptedException {
  parser.parse(value);
  if (parser.isValidTemperature()) {
    int airTemperature = parser.getAirTemperature();
    context.write(new Text(parser.getYear()), new IntWritable(airTemperature));
  } else if (parser.isMalformedTemperature()) {
    System.err.println("Ignoring possibly corrupt input: " + value);
    context.getCounter(Temperature.MALFORMED).increment(1);
  } else if (parser.isMissingTemperature()) {
    context.getCounter(Temperature.MISSING).increment(1);
  }
  // dynamic counter
  context.getCounter("TemperatureQuality", parser.getQuality()).increment(1);
}
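// A minimal sketch of the Temperature counter enum incremented by the mapper above (assumed
// shape; Hadoop only requires a plain enum whose constants name the counters).
enum Temperature {
  MISSING,
  MALFORMED
}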
/* (non-Javadoc) * @see org.apache.hadoop.mapreduce.Mapper#setup(org.apache.hadoop.mapreduce.Mapper.Context) */ protected void setup(Context context) throws IOException, InterruptedException { Configuration config = context.getConfiguration(); fieldDelimRegex = config.get("field.delim.regex", ","); isValidationMode = config.getBoolean("validation.mode", true); classCondtionWeighted = config.getBoolean("class.condition.weighted", false); String predictionMode = config.get("prediction.mode", "classification"); String regressionMethod = config.get("regression.method", "average"); isLinearRegression = predictionMode.equals("regression") && regressionMethod.equals("linearRegression"); }
@Override
protected void reduce(Text key, Iterable<IntWritable> value, Context context)
    throws IOException, InterruptedException {
  int count = 0;
  for (IntWritable i : value) {
    count += i.get();
  }
  t.set(String.valueOf(count));
  context.write(key, t);
}
/* (non-Javadoc)
 * @see org.apache.hadoop.mapreduce.Mapper#map(KEYIN, VALUEIN, org.apache.hadoop.mapreduce.Mapper.Context)
 */
@Override
protected void map(LongWritable key, Text value, Context context)
    throws IOException, InterruptedException {
  items = value.toString().split(fieldDelimRegex);
  outKey.initialize();
  outVal.initialize();
  if (classCondtionWeighted) {
    trainEntityId = items[2];
    testEntityId = items[0];
    rank = Integer.parseInt(items[3]);
    trainClassAttr = items[4];
    trainingFeaturePostProb = Double.parseDouble(items[5]);
    if (isValidationMode) {
      // validation mode
      testClassAttr = items[1];
      outKey.add(testEntityId, testClassAttr, rank);
    } else {
      // prediction mode
      outKey.add(testEntityId, rank);
    }
    outVal.add(trainEntityId, rank, trainClassAttr, trainingFeaturePostProb);
  } else {
    int index = 0;
    trainEntityId = items[index++];
    testEntityId = items[index++];
    rank = Integer.parseInt(items[index++]);
    trainClassAttr = items[index++];
    if (isValidationMode) {
      // validation mode
      testClassAttr = items[index++];
    }
    outVal.add(trainEntityId, rank, trainClassAttr);
    // for linear regression add numeric input field
    if (isLinearRegression) {
      trainRegrNumFld = items[index++];
      outVal.add(trainRegrNumFld);
      testRegrNumFld = items[index++];
      if (isValidationMode) {
        outKey.add(testEntityId, testClassAttr, testRegrNumFld, rank);
      } else {
        outKey.add(testEntityId, testRegrNumFld, rank);
      }
      outKey.add(testRegrNumFld);
    } else {
      if (isValidationMode) {
        outKey.add(testEntityId, testClassAttr, rank);
      } else {
        outKey.add(testEntityId, rank);
      }
    }
  }
  context.write(outKey, outVal);
}
@Override
public void map(Object key, Text value, Context context)
    throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  String keywords = conf.get("keyword");
  String line = value.toString();
  String[] terms = line.split("\t"); // terms[0] is the required keyword
  String[] multiKeywords = keywords.split(" ");
  boolean found = false;
  for (int i = 0; i < multiKeywords.length; i++) {
    if (multiKeywords[i].equals(terms[0])) {
      found = true;
      break;
    }
  }
  if (found) {
    context.write(new Text("Keywords"), new Text(terms[0] + "@" + terms[1]));
  }
}
public void map(LongWritable key, Text value, Context output) throws IOException, InterruptedException { parser.parse(value.toString()); String param = jobconf.get("param.color"); if (parser.getColor().equals(param)) { suit = parser.getSuit(); output.write(new Text(suit), one); } }
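// A hedged sketch (assumption, not from the original source) of the setup() expected to back the
// map() above: jobconf must come from the task context for the driver-supplied "param.color" to
// be visible; the concrete parser type is not shown above, so it is only indicated in a comment.
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  jobconf = context.getConfiguration();
  // parser = new ...; // whichever parser type provides parse()/getColor()/getSuit()
}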
@Override public void reduce(IntWritable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException { Integer sum = 0; for (IntWritable val : values) { sum += val.get(); } context.write(key, new IntWritable(sum)); }
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { String[] words = value.toString().split(","); StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append(words[0]); stringBuilder.append(","); stringBuilder.append(words[1]); String fstring = stringBuilder.toString(); context.write(new CustomWritable(Integer.parseInt(words[2]), 1), new Text(fstring)); }
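// A hypothetical sketch of the CustomWritable composite key emitted above (an assumption; the
// real class is not shown). A Hadoop map output key must implement WritableComparable, so the
// sketch stores the two int constructor arguments and orders by the first field, then the second.
public static class CustomWritable implements WritableComparable<CustomWritable> {

  private int first;
  private int second;

  public CustomWritable() {} // required for Hadoop deserialization

  public CustomWritable(int first, int second) {
    this.first = first;
    this.second = second;
  }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(first);
    out.writeInt(second);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    first = in.readInt();
    second = in.readInt();
  }

  @Override
  public int compareTo(CustomWritable other) {
    int cmp = Integer.compare(first, other.first);
    return cmp != 0 ? cmp : Integer.compare(second, other.second);
  }

  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof CustomWritable)) {
      return false;
    }
    CustomWritable that = (CustomWritable) obj;
    return first == that.first && second == that.second;
  }

  @Override
  public int hashCode() {
    return 31 * first + second;
  }
}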
/**
 * Test connecting one client to three servers.
 *
 * @throws IOException
 */
@Test
public void connectOneClientToThreeServers() throws IOException {
  @SuppressWarnings("rawtypes")
  Context context = mock(Context.class);
  when(context.getConfiguration()).thenReturn(conf);
  ServerData<IntWritable, IntWritable, IntWritable, IntWritable> serverData =
      MockUtils.createNewServerData(conf, context);
  RequestServerHandler.Factory requestServerHandlerFactory =
      new WorkerRequestServerHandler.Factory(serverData);
  WorkerInfo workerInfo1 = new WorkerInfo();
  workerInfo1.setTaskId(1);
  NettyServer server1 = new NettyServer(conf, requestServerHandlerFactory, workerInfo1, context);
  server1.start();
  workerInfo1.setInetSocketAddress(server1.getMyAddress());
  WorkerInfo workerInfo2 = new WorkerInfo();
  workerInfo2.setTaskId(2);
  NettyServer server2 = new NettyServer(conf, requestServerHandlerFactory, workerInfo2, context);
  server2.start();
  workerInfo2.setInetSocketAddress(server2.getMyAddress());
  WorkerInfo workerInfo3 = new WorkerInfo();
  workerInfo3.setTaskId(3);
  NettyServer server3 = new NettyServer(conf, requestServerHandlerFactory, workerInfo3, context);
  server3.start();
  workerInfo3.setInetSocketAddress(server3.getMyAddress());
  NettyClient client = new NettyClient(context, conf, new WorkerInfo());
  List<WorkerInfo> addresses =
      Lists.<WorkerInfo>newArrayList(workerInfo1, workerInfo2, workerInfo3);
  client.connectAllAddresses(addresses);
  client.stop();
  server1.stop();
  server2.stop();
  server3.stop();
}
/* (non-Javadoc)
 * @see org.apache.hadoop.mapreduce.Mapper#cleanup(org.apache.hadoop.mapreduce.Mapper.Context)
 */
protected void cleanup(Context context) throws IOException, InterruptedException {
  if (isValidationMode) {
    if (neighborhood.IsInClassificationMode()) {
      context.getCounter("Validation", "TruePositive").increment(confMatrix.getTruePos());
      context.getCounter("Validation", "FalseNegative").increment(confMatrix.getFalseNeg());
      context.getCounter("Validation", "TrueNegative").increment(confMatrix.getTrueNeg());
      context.getCounter("Validation", "FalsePositive").increment(confMatrix.getFalsePos());
      context.getCounter("Validation", "Accuracy").increment(confMatrix.getAccuracy());
      context.getCounter("Validation", "Recall").increment(confMatrix.getRecall());
      context.getCounter("Validation", "Precision").increment(confMatrix.getPrecision());
    }
  }
}
public void map(Object key, Text value, Context context)
    throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  FileSplit split = (FileSplit) context.getInputSplit();
  String rootFolder = split.getPath().getParent().toString();
  String uri = rootFolder + "/" + value.toString(); // e.g. C:/hadoopsample/input/images + "/" + image1.jpg
  FileSystem fs = FileSystem.get(URI.create(uri), conf);
  FSDataInputStream in = null;
  try {
    in = fs.open(new Path(uri));
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    byte[] buffer = new byte[1024 * 1024];
    // Write only the bytes actually read; writing the whole buffer after a short read would
    // append stale bytes to the image.
    int bytesRead;
    while ((bytesRead = in.read(buffer, 0, buffer.length)) != -1) {
      bout.write(buffer, 0, bytesRead);
    }
    context.write(value, new BytesWritable(bout.toByteArray()));
  } finally {
    IOUtils.closeStream(in);
  }
}
@Override
public void reduce(Text key, Iterable<Text> values, Context context)
    throws IOException, InterruptedException {
  Hashtable<String, Integer> wordCounts = new Hashtable<String, Integer>();
  List<String> docName = new ArrayList<String>();
  List<String> wordName = new LinkedList<String>();
  // Single for-each pass instead of repeated values.iterator() calls inside a while loop.
  for (Text value : values) {
    String[] items = value.toString().split("@");
    if (!wordName.contains(items[0])) {
      wordName.add(items[0]);
    }
    String[] keys = items[1].split(":|,");
    for (int i = 0; i < keys.length; i += 2) {
      if (!docName.contains(keys[i])) {
        docName.add(keys[i]);
        wordCounts.put(keys[i], 0);
      }
      int tmp = wordCounts.get(keys[i]);
      tmp += Integer.parseInt(keys[i + 1]);
      wordCounts.put(keys[i], tmp);
    }
  }
  // Sort document names by descending count.
  for (int i = 0; i < docName.size() - 1; ++i) {
    for (int j = i + 1; j < docName.size(); ++j) {
      if (wordCounts.get(docName.get(i)) < wordCounts.get(docName.get(j))) {
        String stmp = docName.get(i);
        docName.set(i, docName.get(j));
        docName.set(j, stmp);
      }
    }
  }
  String retKey = wordName.get(0);
  for (int i = 1; i < wordName.size(); ++i) {
    retKey += "," + wordName.get(i);
  }
  String retValue = "";
  for (int i = 0; i < docName.size(); ++i) {
    retValue += "\n" + docName.get(i) + ": " + wordCounts.get(docName.get(i));
  }
  context.write(new Text(retKey), new Text(retValue));
}
@Override
public void map(LongWritable key, Text value, Context context)
    throws IOException, InterruptedException {
  String[] line = value.toString().split("\u0001");
  if (line.length < 6) {
    return;
  }
  String id = line[0];
  String peerid = line[2];
  String filename = line[5];
  if ((id.equals("2") || id.equals("724")) && Pattern.matches(patternFilename, filename)) {
    context.write(new Text(id + "\t" + peerid), new IntWritable(1));
  }
}
@Override
public void run() {
  System.out.println("write:");
  for (Entry<Integer, float[]> entry : maps.entrySet()) {
    int entryKey = entry.getKey();
    float[] value = entry.getValue();
    // Join the float values with commas, without a trailing separator.
    StringBuilder result = new StringBuilder();
    for (int i = 0; i < value.length; i++) {
      if (i > 0) {
        result.append(",");
      }
      result.append(value[i]);
    }
    try {
      context.write(new Text(String.valueOf(entryKey)), new Text(result.toString()));
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
}
private void writeMapUnionResult( Context context, String appId, String accountId, String platform, String channel, String gameServer, String onlineRecords, String playerType) throws IOException, InterruptedException { String[] keyFields = new String[] {appId, accountId, platform, gameServer}; String[] valFields = new String[] {channel, onlineRecords}; mapKeyObj.setOutFields(keyFields); mapValObj.setOutFields(valFields); mapValObj.setSuffix(playerType); context.write(mapKeyObj, mapValObj); }
@Override public void reduce(NullWritable key, Iterable<IntArrayWritable> values, Context context) throws IOException, InterruptedException { for (IntArrayWritable val : values) { IntWritable[] pair = (IntWritable[]) val.toArray(); Integer pageId = pair[0].get(); Integer count = pair[1].get(); linksMap.add(new Pair<Integer, Integer>(count, pageId)); if (linksMap.size() > N) { linksMap.remove(linksMap.first()); } } for (Pair<Integer, Integer> item : linksMap) { IntWritable id = new IntWritable(item.second); IntWritable value = new IntWritable(item.first); context.write(id, value); } }
public void map(Object key, Text value, Context context)
    throws IOException, InterruptedException {
  String[] feature = value.toString().split("\t");
  double[] point = new double[feature.length - 3];
  for (int j = 3; j < feature.length; j++) {
    point[j - 3] = Double.parseDouble(feature[j].trim());
  }
  int len = centroidsList.length;
  double[] diffList = new double[len];
  double minDiff = Double.MAX_VALUE;
  int minIndex = -1;
  for (int i = 0; i < len; i++) {
    try {
      diffList[i] = calcDist(centroidsList[i], point);
    } catch (Exception e) {
      e.printStackTrace();
      System.exit(1);
    }
    if (diffList[i] < minDiff) {
      minDiff = diffList[i];
      minIndex = i;
    }
  }
  // DoubleWritable minDiffDW = new DoubleWritable(minDiff);
  Text keyWithCentroid = new Text("" + minIndex);
  StringBuffer sb = new StringBuffer();
  for (int i = 0; i < point.length; i++) {
    sb.append(point[i] + "\t");
  }
  String pointStr = sb.toString().trim();
  Text pointText = new Text(1 + "#" + pointStr);
  context.write(keyWithCentroid, pointText);
}
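// A minimal sketch of the calcDist helper invoked above (an assumption; the original
// implementation is not shown): plain Euclidean distance between a centroid and a point of the
// same dimensionality.
private static double calcDist(double[] centroid, double[] point) {
  double sum = 0.0;
  for (int i = 0; i < centroid.length; i++) {
    double diff = centroid[i] - point[i];
    sum += diff * diff;
  }
  return Math.sqrt(sum);
}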
@Override protected void setup(Context context) throws IOException, InterruptedException { Configuration conf = context.getConfiguration(); this.N = conf.getInt("N", 10); }
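// A hedged driver-side sketch showing how the "N" value read in the setup() above could be
// supplied; the job name and the commented-out wiring are illustrative assumptions rather than
// the original driver.
public class TopNDriver {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.setInt("N", 10); // consumed via context.getConfiguration().getInt("N", 10)
    Job job = Job.getInstance(conf, "top N pages");
    // job.setJarByClass(...), job.setMapperClass(...), job.setReducerClass(...),
    // FileInputFormat/FileOutputFormat paths, output key/value classes, etc.
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}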
@Override protected void reduce( Key_IMOAndRecordTime key, Iterable<TextArrayWritable> LocationList, Context context) throws IOException, InterruptedException { try { context.getCounter(Counters.VESSEL_PROCESSED).increment(1); String IMO_str = LpadNum(key.getIMO().get(), 7); long first_pos_time = key.getRecordTime().get(); ///////////////////////////////////////////////////////////////////////////////// // Populate newPoints with new locations List<VesselLocation> newPoints = new ArrayList<VesselLocation>(); for (TextArrayWritable rowcontent : LocationList) { // population location context.getCounter(Counters.LOCATION_ROWS).increment(1); VesselLocation newlocation = new VesselLocation(); try { Writable[] content = rowcontent.get(); String Latitude = content[16].toString().trim(); String Longitude = content[15].toString().trim(); String Coordinates = Latitude + "," + Longitude; String Speed = content[18].toString().trim(); String Destination = content[9].toString().trim(); String Timestamp = content[21].toString().trim().substring(0, 19); long record_time = DateTime.parse(Timestamp, rawformatter).getMillis(); newlocation.coordinates = Coordinates; newlocation.recordtime = record_time; newlocation.speed = Speed; newlocation.destination = Destination; context.getCounter(Counters.LOCATION_VALID).increment(1); } catch (Exception e) { e.printStackTrace(); context.getCounter(Counters.LOCATION_ERROR).increment(1); continue; } newPoints.add(newlocation); } ///////////////////////////////////////////////////////////////////////////////// // Get last new post time long last_pos_time = newPoints.get(newPoints.size() - 1).recordtime; //////////////////////////////////////////////////////////////////////////////// // Get Existing trackinfo VesselTrackInfo VTI = getTrackInfo(TrackInfo_Table, IMO_str); List<VesselLocation> AllBetweenPoints = new ArrayList<VesselLocation>(); String BeforeRowKey = null; String AfterRowKey = null; // ////////////////////////////////////////////////////////////////////////////// // Retrieve all the existing locations between the first new location and the last new // location. 
if ((VTI.FirstRecordTime != null) && (VTI.LastRecordTime != null)) { if (last_pos_time < VTI.FirstRecordTime) { AfterRowKey = IMO_str + LpadNum(Long.MAX_VALUE - VTI.FirstRecordTime, 19); } else if (first_pos_time > VTI.LastRecordTime) { BeforeRowKey = IMO_str + LpadNum(Long.MAX_VALUE - VTI.LastRecordTime, 19); } else { AllBetweenPoints = ImportReducer.getLocationsBetween( VTLocation_Table, IMO_str, first_pos_time, last_pos_time); if (AllBetweenPoints.size() == 0) { // Search for the first DB point before the first new point VesselLocation BeforeLocation = getLocationBefore(VTLocation_Table, IMO_str, first_pos_time); BeforeRowKey = IMO_str + LpadNum(Long.MAX_VALUE - BeforeLocation.recordtime, 19); AfterRowKey = BeforeLocation.nextlocation; } else { java.util.Collections.sort(AllBetweenPoints); BeforeRowKey = AllBetweenPoints.get(0).previouslocation; AfterRowKey = AllBetweenPoints.get(AllBetweenPoints.size() - 1).nextlocation; } List<Delete> deletes = ImportReducer.GetDeleteEventsBetween( VTEvent_Table, IMO_str, first_pos_time, last_pos_time); ImportReducer.DeleteEvents(VTEvent, deletes); VTEvent.flush(); } } // Find out the location before the first new location in VesselLocation BeforeLocation = getLocation(VTLocation_Table, BeforeRowKey); // Find out the location after the last new location in VesselLocation AfterLocation = getLocation(VTLocation_Table, AfterRowKey); Map<Integer, VesselEvent> PreviousZoneEvents = new HashMap<Integer, VesselEvent>(); ; Map<Integer, VesselEvent> AfterZoneEvents = new HashMap<Integer, VesselEvent>(); if (BeforeLocation != null) { // Get all events with exit at last location PreviousZoneEvents = getAllEventsStartBeforeEndAfterBeforeLocation(VTEvent_Table, IMO_str, BeforeLocation); } //////////////////////////////////////////////////// // Analyze and calculate previous and next location for (VesselLocation newlocation : newPoints) { int index = AllBetweenPoints.indexOf(newlocation); if (index != -1) { VesselLocation dblocation = AllBetweenPoints.get(index); dblocation.coordinates = newlocation.coordinates; dblocation.destination = newlocation.destination; dblocation.speed = newlocation.speed; } else { AllBetweenPoints.add(newlocation); } } java.util.Collections.sort(AllBetweenPoints); String previousRowKey = null; for (VesselLocation location : AllBetweenPoints) { location.previouslocation = previousRowKey; previousRowKey = IMO_str + LpadNum(Long.MAX_VALUE - location.recordtime, 19); } String NextRowKey = null; for (int i = (AllBetweenPoints.size() - 1); i >= 0; i--) { VesselLocation location = AllBetweenPoints.get(i); location.nextlocation = NextRowKey; NextRowKey = IMO_str + LpadNum(Long.MAX_VALUE - location.recordtime, 19); } AllBetweenPoints.get(0).previouslocation = BeforeRowKey; AllBetweenPoints.get(AllBetweenPoints.size() - 1).nextlocation = AfterRowKey; //////////////////////////////////////////////////// // Upsert all locations for (VesselLocation location : AllBetweenPoints) { // population location try { byte[] rowkey = Bytes.toBytes(IMO_str + LpadNum(Long.MAX_VALUE - location.recordtime, 19)); Put put = new Put(rowkey); put.addColumn(details, speed, Bytes.toBytes(location.speed)); put.addColumn(details, destination, Bytes.toBytes(location.destination)); put.addColumn(details, coordinates, Bytes.toBytes(location.coordinates)); put.addColumn( details, timestamp, Bytes.toBytes(new DateTime(location.recordtime).toString(rawformatter))); if (location.previouslocation != null) { put.addColumn(details, previouslocation, 
Bytes.toBytes(location.previouslocation)); } if (location.nextlocation != null) { put.addColumn(details, nextlocation, Bytes.toBytes(location.nextlocation)); } VTLocation.mutate(put); } catch (Exception e) { e.printStackTrace(); context.getCounter(Counters.LOCATION_ERROR).increment(1); continue; } } // update before next location and after previous location if (BeforeRowKey != null) { Put BeforeLocationPut = new Put(Bytes.toBytes(BeforeRowKey)); BeforeLocationPut.addColumn( details, nextlocation, Bytes.toBytes( IMO_str + LpadNum(Long.MAX_VALUE - AllBetweenPoints.get(0).recordtime, 19))); VTLocation.mutate(BeforeLocationPut); } if (AfterRowKey != null) { Put AfterLocationPut = new Put(Bytes.toBytes(AfterRowKey)); AfterLocationPut.addColumn( details, previouslocation, Bytes.toBytes( IMO_str + LpadNum( Long.MAX_VALUE - AllBetweenPoints.get(AllBetweenPoints.size() - 1).recordtime, 19))); VTLocation.mutate(AfterLocationPut); } VTLocation.flush(); ///////////////////////////////////////////////////////////////////// // Store latest location // rowkey: global zone id (4)+ longlat22 // ((long11(sign(1)+integer(3)+digit(7)))(lat11(sign(1)+integer(3)+(7))))+imo(7)+recordtime(19) ///////////////////////////////////////////////////////////////////// Put vessel_track_info = new Put(Bytes.toBytes(IMO_str)); if (AfterLocation == null) { // Get the last location VesselLocation lastLocation = AllBetweenPoints.get(AllBetweenPoints.size() - 1); // update the last location String[] longlat = lastLocation.coordinates.split(","); GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); Coordinate coord = new Coordinate(Double.parseDouble(longlat[1]), Double.parseDouble(longlat[0])); Point point = geometryFactory.createPoint(coord); Integer BelongedGlobalZoneIndex = null; for (int i = 0; i < VesselZone.GlobalZones.length; i++) { if (VesselZone.GlobalZones[i].covers(point)) { BelongedGlobalZoneIndex = i; break; } } if (VTI.LastLocation != null) { LastLocation_BM.mutate(new Delete(VTI.LastLocation)); } byte[] lastlocationrowkey = Bytes.toBytes( LpadNum(BelongedGlobalZoneIndex, 4) + ConvertCoordinatesToStr(longlat[1]) + ConvertCoordinatesToStr(longlat[0])); Put lastlocation_put = new Put(lastlocationrowkey); lastlocation_put.addColumn(details, imo, Bytes.toBytes(IMO_str)); lastlocation_put.addColumn( details, timestamp, Bytes.toBytes(new DateTime(lastLocation.recordtime).toString(rawformatter))); LastLocation_BM.mutate(lastlocation_put); LastLocation_BM.flush(); vessel_track_info.addColumn(details, lastlocation, lastlocationrowkey); vessel_track_info.addColumn( details, lastrecordtime, Bytes.toBytes(new DateTime(lastLocation.recordtime).toString(rawformatter))); } else { // Get events that start before last new location and end after last new location AfterZoneEvents = getAllEventsStartBeforeEndAfter(VTEvent_Table, IMO_str, AfterLocation.recordtime); } // update firstrecordtime and lastrecordtime if (BeforeLocation == null) { vessel_track_info.addColumn( details, firstrecordtime, Bytes.toBytes( new DateTime(AllBetweenPoints.get(0).recordtime).toString(rawformatter))); } if (!vessel_track_info.isEmpty()) { TrackInfo_BM.mutate(vessel_track_info); TrackInfo_BM.flush(); } //////////////////////////////////////////////////////////////////// ArrayList<VesselEvent> DerivedEventList = new ArrayList<VesselEvent>(); /////////////////////////////////////////////////////////// // Get Vessel String VesselType = getVesselType(Vessel_Table, IMO_str); if (VesselType == null) { 
context.getCounter(Counters.VESSEL_WITHOUTTYPE).increment(1); return; } // calculating event for (VesselLocation VL : AllBetweenPoints) { ArrayList<Integer> CurrentZones = LocateCurrentZone(VL.coordinates, VesselType, Zonemap); Iterator<Map.Entry<Integer, VesselEvent>> it = PreviousZoneEvents.entrySet().iterator(); while (it.hasNext()) { Map.Entry<Integer, VesselEvent> thisEntry = it.next(); int Zone_Axsmarine_id = thisEntry.getKey(); if (!CurrentZones.contains(Zone_Axsmarine_id)) { VesselEvent PreviousEvent = thisEntry.getValue(); if (!DerivedEventList.contains(PreviousEvent)) { DerivedEventList.add(PreviousEvent); } // remove close event from PreviousZoneEvents; it.remove(); } } for (Integer thisZone_Axsmarine_id : CurrentZones) { if (PreviousZoneEvents.containsKey(thisZone_Axsmarine_id)) { ////////////////////////////////////////////////// // For current zones which both previous and current locations belong to, update exit // point of previous open events with current locations. ////////////////////////////////////////////////// VesselEvent PreviousEvent = PreviousZoneEvents.get(thisZone_Axsmarine_id); PreviousEvent.exitcoordinates = VL.coordinates; PreviousEvent.exittime = VL.recordtime; PreviousEvent.destination = VL.destination; if (!DerivedEventList.contains(PreviousEvent)) { DerivedEventList.add(PreviousEvent); } } else { ////////////////////////////////////////////////// // For current zones which only current locations belong to, fire new open events ////////////////////////////////////////////////// VesselEvent NewEvent = new VesselEvent(); NewEvent.entrycoordinates = VL.coordinates; NewEvent.entrytime = VL.recordtime; NewEvent.exitcoordinates = VL.coordinates; NewEvent.exittime = VL.recordtime; NewEvent.destination = VL.destination; NewEvent.polygonid = thisZone_Axsmarine_id; PreviousZoneEvents.put(thisZone_Axsmarine_id, NewEvent); DerivedEventList.add(NewEvent); } } } /////////////////////////////////////////////////////////////////////////////////////// // Merge with PreviousZoneEvents with AfterZoneEvents Iterator<Map.Entry<Integer, VesselEvent>> it = AfterZoneEvents.entrySet().iterator(); while (it.hasNext()) { Map.Entry<Integer, VesselEvent> thisEntry = it.next(); int Zone_Axsmarine_id = thisEntry.getKey(); VesselEvent After_VE = thisEntry.getValue(); VesselEvent Previous_VE = PreviousZoneEvents.get(Zone_Axsmarine_id); if (Previous_VE != null) { Previous_VE.exitcoordinates = After_VE.exitcoordinates; Previous_VE.exittime = After_VE.exittime; Previous_VE.destination = After_VE.destination; if (!DerivedEventList.contains(Previous_VE)) { DerivedEventList.add(Previous_VE); } } else { VesselEvent NewEvent = new VesselEvent(); NewEvent.entrycoordinates = AfterLocation.coordinates; NewEvent.entrytime = AfterLocation.recordtime; NewEvent.exitcoordinates = After_VE.exitcoordinates; NewEvent.exittime = After_VE.exittime; NewEvent.destination = After_VE.destination; NewEvent.polygonid = Zone_Axsmarine_id; DerivedEventList.add(NewEvent); } // Delete This Event from HBase DeleteEvent(VTEvent, IMO_str, After_VE); } VTEvent.flush(); // pupulate Derived Events into Hbase for (VesselEvent newEvent : DerivedEventList) { // rowkey: IMO(7)+timestamp(19 desc)+polygonid(8) // qualifier:entrytime,entrycoordinates,exittime,exitcoordinates,destination context.getCounter(Counters.EVENT_UPSERTS).increment(1); byte[] rowkey = Bytes.toBytes( IMO_str + LpadNum(Long.MAX_VALUE - newEvent.entrytime, 19) + LpadNum(newEvent.polygonid, 10)); Put put = new Put(rowkey); put.addColumn( details, entrytime, 
Bytes.toBytes(new DateTime(newEvent.entrytime).toString(rawformatter))); put.addColumn(details, entrycoordinates, Bytes.toBytes(newEvent.entrycoordinates)); put.addColumn( details, exittime, Bytes.toBytes(new DateTime(newEvent.exittime).toString(rawformatter))); put.addColumn(details, exitcoordinates, Bytes.toBytes(newEvent.exitcoordinates)); put.addColumn(details, destination, Bytes.toBytes(newEvent.destination)); VTEvent.mutate(put); context.getCounter(Counters.EVENT_VALID).increment(1); } // VTLocation.flush(); Moved to the first step VTEvent.flush(); } catch (RuntimeException e) { // TODO Auto-generated catch block System.out.println("Exception occured while loading data for:" + key.getIMO()); throw e; } }
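// A hedged sketch of the LpadNum helper used throughout the reducer above to build fixed-width
// HBase row keys (an assumption; the original is not shown). It left-pads the decimal form of a
// number with zeros to the requested width.
private static String LpadNum(long number, int width) {
  String digits = Long.toString(number);
  StringBuilder padded = new StringBuilder();
  for (int i = digits.length(); i < width; i++) {
    padded.append('0');
  }
  padded.append(digits);
  return padded.toString();
}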