/**
 * Get the ChunkMeta of the chunk that covers the year of the given timestamp, if present.
 *
 * @param chunkMetaMap map of chunk metadata, keyed by chunk first timestamp
 * @param timestamp timestamp to look up
 * @return the matching ChunkMeta, or null if no chunk exists for that year
 */
private ChunkMeta getChunkMeta(BTreeMap<Integer, ChunkMeta> chunkMetaMap, int timestamp) {
	int timestamp_year = TimeUtil.roundLowerYear(timestamp);
	int timestamp_next_year = TimeUtil.roundNextYear(timestamp);
	// smallest chunk start at or after the start of the timestamp's year
	Integer key = chunkMetaMap.ceilingKey(timestamp_year);
	if (key == null) {
		return null;
	}
	// the chunk only matches if it starts before the next year boundary,
	// i.e. within the same year as the timestamp
	if (timestamp_next_year <= key) {
		return null;
	}
	ChunkMeta chunkMeta = chunkMetaMap.get(key);
	throwNull(chunkMeta);
	return chunkMeta;
}
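/*
 * Minimal sketch (not part of the original code): the same year-bucket lookup idea,
 * using java.util.TreeMap as a stand-in for the MapDB BTreeMap and hypothetical
 * "year start" values in place of TimeUtil. All names and numbers are illustrative.
 */
import java.util.TreeMap;

public class YearBucketLookupSketch {
	public static void main(String[] args) {
		TreeMap<Integer, String> chunksByFirstTimestamp = new TreeMap<>();
		chunksByFirstTimestamp.put(1000, "chunk starting at t=1000"); // assume year A spans [1000, 2000)
		chunksByFirstTimestamp.put(2500, "chunk starting at t=2500"); // assume year B spans [2000, 3000)

		int timestamp = 1500;     // query inside year A
		int yearStart = 1000;     // stand-in for TimeUtil.roundLowerYear(timestamp)
		int nextYearStart = 2000; // stand-in for TimeUtil.roundNextYear(timestamp)

		// ceilingKey: smallest chunk start >= year start; it only counts as a hit
		// if it still lies before the next year boundary, i.e. within the same year
		Integer key = chunksByFirstTimestamp.ceilingKey(yearStart);
		if (key != null && key < nextYearStart) {
			System.out.println("found " + chunksByFirstTimestamp.get(key));
		} else {
			System.out.println("no chunk for this year");
		}
	}
}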
/**
 * Insert ordered sensor data, split into per-year chunks.
 */
public void insertSensorData(String stationName, String sensorName, DataEntry[] data) {
	//log.info("streamDB insert data "+stationName+" "+sensorName+" "+data.length);
	throwNull(stationName);
	throwNull(sensorName);
	throwNull(data);
	if (data.length == 0) {
		log.warn("no data to insert");
		return;
	}
	SensorMeta sensorMeta = getSensorMeta(stationName, sensorName, true);
	BTreeMap<Integer, ChunkMeta> chunkMetaMap = getSensorChunkMetaMap(sensorMeta);
	BTreeMap<Integer, Chunk> chunkMap = getSensorChunkMap(sensorMeta);
	int timestamp_next_year = Integer.MIN_VALUE;
	ArrayList<DataEntry> entryList = new ArrayList<DataEntry>(data.length);
	int prevTimestamp = -1;
	for (DataEntry entry : data) {
		if (entry.timestamp <= prevTimestamp) {
			throw new RuntimeException("not ordered timestamps " + TimeUtil.oleMinutesToText(prevTimestamp) + " " + TimeUtil.oleMinutesToText(entry.timestamp) + " " + entry.value + " " + stationName + "/" + sensorName);
		}
		if (entry.timestamp < timestamp_next_year) {
			// still within the current year: collect
			entryList.add(entry);
		} else {
			// crossed a year boundary: flush the collected entries into one chunk
			// and start a new per-year batch
			if (!entryList.isEmpty()) {
				insertIntoOneChunk(chunkMetaMap, chunkMap, entryList);
			}
			timestamp_next_year = TimeUtil.roundNextYear(entry.timestamp);
			entryList.clear();
			entryList.add(entry);
		}
		prevTimestamp = entry.timestamp;
	}
	if (!entryList.isEmpty()) {
		insertIntoOneChunk(chunkMetaMap, chunkMap, entryList);
	}
}
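/*
 * Minimal sketch (not part of the original code): splitting an ordered series of
 * timestamps into per-year batches, the same grouping insertSensorData performs
 * before calling insertIntoOneChunk. The "year" boundary and all timestamps here
 * are hypothetical (a year is simply a block of 1000 ticks).
 */
import java.util.ArrayList;
import java.util.List;

public class YearBatchSketch {
	// stand-in for TimeUtil.roundNextYear
	static int roundNextYear(int timestamp) {
		return (timestamp / 1000 + 1) * 1000;
	}

	public static void main(String[] args) {
		int[] timestamps = {100, 700, 950, 1200, 1300, 2400}; // ordered, spans three "years"
		List<List<Integer>> batches = new ArrayList<>();
		List<Integer> current = new ArrayList<>();
		int nextYear = Integer.MIN_VALUE;
		for (int t : timestamps) {
			if (t >= nextYear) { // crossed a year boundary: flush and start a new batch
				if (!current.isEmpty()) {
					batches.add(current);
				}
				nextYear = roundNextYear(t);
				current = new ArrayList<>();
			}
			current.add(t);
		}
		if (!current.isEmpty()) {
			batches.add(current);
		}
		System.out.println(batches); // [[100, 700, 950], [1200, 1300], [2400]]
	}
}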
/**
 * Parse a YAML entry into a list of LabeledProperty, one per item of its "content" list.
 */
public static List<LabeledProperty> parse(YamlMap entry) {
	String station = entry.getString("station");
	String startObject = entry.optString("start", "*");
	String endObject = entry.optString("end", "*");
	int start = TimeUtil.parseStartTimestamp(startObject);
	int end = TimeUtil.parseEndTimestamp(endObject);
	log.trace(startObject);
	log.trace(endObject);
	List<LabeledProperty> result = new ArrayList<LabeledProperty>();
	for (YamlMap map : entry.getList("content").asMaps()) {
		try {
			String label = map.getString("label");
			LabeledProperty labeledProperty = new LabeledProperty(station, label, start, end, map);
			result.add(labeledProperty);
			log.trace(labeledProperty);
		} catch (Exception e) {
			log.warn("could not parse entry " + entry + " " + e);
		}
	}
	return result;
}
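/*
 * Hypothetical example of a YAML entry that parse(...) could accept, inferred from the
 * keys read above; all values are illustrative. "start" and "end" default to "*" when
 * omitted, and each map in "content" needs at least a "label" (additional keys are kept
 * in the map passed to LabeledProperty).
 *
 * station: SEG29
 * content:
 *   - label: maintenance
 *   - label: calibration
 */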
private void insertIntoOneChunk(BTreeMap<Integer, ChunkMeta> chunkMetaMap, BTreeMap<Integer, Chunk> chunkMap, ArrayList<DataEntry> entryList) {
	//int timestamp_chunk = TimeConverter.roundLowerYear(entryList.get(0).timestamp);
	int timestamp_next_year = TimeUtil.roundNextYear(entryList.get(0).timestamp);
	if (timestamp_next_year <= entryList.get(entryList.size() - 1).timestamp) {
		throw new RuntimeException("data of more than one chunk");
	}
	//ChunkMeta oldChunkMeta = chunkMetaMap.get(timestamp_chunk);
	ChunkMeta oldChunkMeta = getChunkMeta(chunkMetaMap, entryList.get(0).timestamp);
	if (oldChunkMeta == null) {
		// no existing chunk for this year: insert the new entries directly
		insertChunk(chunkMetaMap, chunkMap, Chunk.of(entryList));
	} else {
		// merge the existing chunk with the new entries, ordered by timestamp;
		// on equal timestamps the new entry overwrites the old one
		Chunk oldChunk = chunkMap.get(oldChunkMeta.firstTimestamp);
		Iterator<DataEntry> oldIt = Arrays.stream(oldChunk.data).iterator();
		Iterator<DataEntry> newIt = entryList.iterator();
		ArrayList<DataEntry> resultList = new ArrayList<DataEntry>();
		DataEntry old_curr = oldIt.hasNext() ? oldIt.next() : null;
		DataEntry new_curr = newIt.hasNext() ? newIt.next() : null;
		while (old_curr != null || new_curr != null) {
			if (old_curr != null) {
				if (new_curr != null) {
					if (old_curr.timestamp == new_curr.timestamp) {
						// overwrite old data with new data
						resultList.add(new_curr);
						old_curr = oldIt.hasNext() ? oldIt.next() : null;
						new_curr = newIt.hasNext() ? newIt.next() : null;
					} else if (old_curr.timestamp < new_curr.timestamp) {
						resultList.add(old_curr);
						old_curr = oldIt.hasNext() ? oldIt.next() : null;
					} else {
						resultList.add(new_curr);
						new_curr = newIt.hasNext() ? newIt.next() : null;
					}
				} else {
					resultList.add(old_curr);
					old_curr = oldIt.hasNext() ? oldIt.next() : null;
				}
			} else {
				resultList.add(new_curr);
				new_curr = newIt.hasNext() ? newIt.next() : null;
			}
		}
		removeChunk(chunkMetaMap, chunkMap, oldChunkMeta);
		insertChunk(chunkMetaMap, chunkMap, Chunk.of(resultList));
	}
}
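/*
 * Minimal sketch (not part of the original code): the same two-pointer merge as above,
 * reduced to plain timestamp/value pairs. On equal timestamps the new entry wins,
 * matching the overwrite behavior of insertIntoOneChunk. All values are hypothetical.
 */
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class ChunkMergeSketch {
	record Entry(int timestamp, float value) {}

	static List<Entry> merge(List<Entry> oldEntries, List<Entry> newEntries) {
		Iterator<Entry> oldIt = oldEntries.iterator();
		Iterator<Entry> newIt = newEntries.iterator();
		ArrayList<Entry> result = new ArrayList<>();
		Entry o = oldIt.hasNext() ? oldIt.next() : null;
		Entry n = newIt.hasNext() ? newIt.next() : null;
		while (o != null || n != null) {
			if (o != null && (n == null || o.timestamp() < n.timestamp())) {
				result.add(o);
				o = oldIt.hasNext() ? oldIt.next() : null;
			} else {
				result.add(n); // take the new entry; also chosen when timestamps are equal
				if (o != null && o.timestamp() == n.timestamp()) {
					o = oldIt.hasNext() ? oldIt.next() : null; // drop the overwritten old entry
				}
				n = newIt.hasNext() ? newIt.next() : null;
			}
		}
		return result;
	}

	public static void main(String[] args) {
		List<Entry> oldEntries = List.of(new Entry(10, 1f), new Entry(20, 2f), new Entry(30, 3f));
		List<Entry> newEntries = List.of(new Entry(20, 20f), new Entry(25, 25f));
		// expected order: 10 (old), 20 (new), 25 (new), 30 (old)
		merge(oldEntries, newEntries).forEach(System.out::println);
	}
}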
public static void main(String[] args) {
	//final int source_count = Interpolated.STATION_INTERPOLATION_COUNT;
	final int source_count = 6;
	TsDB tsdb = TsDBFactory.createDefault();
	ContinuousGen continuousGen = QueryPlanGenerators.getContinuousGen(tsdb, DataQuality.STEP);
	String[] schema = new String[] {"Ta_200"};
	String targetPlot = "SEG29";
	//String targetPlot = "HEG01";
	long start = TimeUtil.ofDateStartHour(2014, 4);
	long end = TimeUtil.ofDateEndHour(2014, 6);
	long removeStart = TimeUtil.ofDateStartHour(2014, 6);
	long removeEnd = TimeUtil.ofDateEndHour(2014, 6);
	ArrayList<String> result = new ArrayList<String>();
	Station targetStation = tsdb.getStation(targetPlot);
	String[] sourcePlots = targetStation.nearestStations.stream().limit(source_count).map(s -> s.stationID).toArray(String[]::new);
	Continuous targetNode = continuousGen.get(targetStation.stationID, schema);
	Continuous targetNodeIntervalRemoved = IntervalRemove.of(targetNode, removeStart, removeEnd);
	Continuous[] sourceNodes = Arrays.stream(sourcePlots).map(s -> continuousGen.get(s, schema)).toArray(Continuous[]::new);
	for (Continuous source : sourceNodes) {
		TsIterator it = source.get(start, end);
		int count = 0;
		while (it.hasNext()) {
			TsEntry e = it.next();
			if (Float.isFinite(e.data[0])) {
				count++;
			}
		}
		result.add(source.getSourceStation().stationID + " " + count);
	}
	for (String e : result) {
		System.out.println(e);
	}
	Continuous resultNode = Interpolated.of(tsdb, targetNodeIntervalRemoved, sourceNodes, schema);
	//double[] maxMSEs = new double[]{};
	//Continuous resultNode = new InterpolatedAverageLinear(tsdb, targetNodeIntervalRemoved, targetNodeIntervalRemoved, sourceNodes, schema, AggregationInterval.DAY, maxMSEs);
	TsIterator it = resultNode.get(start, end);
	int count = 0;
	while (it.hasNext()) {
		TsEntry e = it.next();
		if (Float.isFinite(e.data[0])) {
			count++;
		}
		//System.out.println(e);
	}
	System.out.println("" + count);
	String path = "C:/timeseriesdatabase_R/";
	targetNode.writeCSV(removeStart, removeEnd, path + targetPlot + "_real.csv");
	resultNode.writeCSV(removeStart, removeEnd, path + targetPlot + "_interpolated.csv");
	sourceNodes[0].writeCSV(removeStart, removeEnd, path + sourceNodes[0].getSourceStation().stationID + "_real.csv");
}