@Override
public Boolean caseAction(Action action) {
  for (Port port : action.getOutputPattern().getPorts()) {
    int numTokens = action.getOutputPattern().getNumTokensMap().get(port);
    if (numTokens > 1) {
      Detector detector = new Detector();
      return detector.doSwitch(action.getBody());
    }
  }
  return false;
}
/** Reads fundamental diagram parameters from AIMSUN-generated XML. */
public void readFundamentalDiagramsFromXML_AIMSUN() {
  for (int key : detectors.keySet()) {
    Detector d = detectors.get(key);
    // find the corresponding FD profile
    int i;
    for (i = 0;
        i < this.mainScenario
            .getFundamentalDiagramProfileSet()
            .getFundamentalDiagramProfile()
            .size();
        i++) {
      if (Integer.parseInt(
              this.mainScenario
                  .getFundamentalDiagramProfileSet()
                  .getFundamentalDiagramProfile()
                  .get(i)
                  .getLinkId())
          == d.getLinkAssoc()) {
        break;
      }
    }
    BigDecimal vf =
        this.mainScenario
            .getFundamentalDiagramProfileSet()
            .getFundamentalDiagramProfile()
            .get(i)
            .getFundamentalDiagram()
            .get(0)
            .getFreeFlowSpeed();
    BigDecimal q_max =
        this.mainScenario
            .getFundamentalDiagramProfileSet()
            .getFundamentalDiagramProfile()
            .get(i)
            .getFundamentalDiagram()
            .get(0)
            .getCapacity();
    BigDecimal rhojam =
        this.mainScenario
            .getFundamentalDiagramProfileSet()
            .getFundamentalDiagramProfile()
            .get(i)
            .getFundamentalDiagram()
            .get(0)
            .getJamDensity();
    double w =
        q_max.doubleValue() / (rhojam.doubleValue() - q_max.doubleValue() / vf.doubleValue());
    d.getFdParams().setFD(vf.doubleValue(), w, q_max.doubleValue() / d.getNumberOfLanes());
    detectors.put(key, d);
  }
}
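// A minimal sketch (hypothetical helper, not part of the original class) of the triangular
// fundamental-diagram relation used above: with free-flow speed vf, capacity q_max and jam
// density rho_jam, the critical density is rho_crit = q_max / vf and the congestion wave speed
// follows as w = q_max / (rho_jam - rho_crit). The method name and the units are assumptions.
public static double congestionWaveSpeed(double vf, double q_max, double rho_jam) {
  double rho_crit = q_max / vf; // density at which capacity flow is reached
  return q_max / (rho_jam - rho_crit); // slope of the congested branch of the diagram
}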
@Override
public void run() {
  debug("Enter the run loop!");
  while (isAlive) {
    // fall back to sleep mode once the awake period has expired
    if (isAwake && wakeupTime + AWAKE_MODE_TIMEOUT < System.currentTimeMillis()) {
      isAwake = false;
    }
    detector.update();
    synchronized (this) {
      try {
        if (isAwake) {
          wait(TICK_AWAKE);
        } else {
          debug("zzzZZZ...");
          wait(TICK_SLEEP);
        }
      } catch (Exception e) {
        // an interrupted wait simply re-checks the loop condition
      }
    }
  }
  debug("Leave the run loop!");
}
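// A minimal sketch (assumed, not shown in the original class) of how the run() loop above is
// meant to be woken: set the awake flag, record the wake-up time checked against
// AWAKE_MODE_TIMEOUT, and notify the thread waiting in wait(TICK_SLEEP). The method name
// wakeUp is hypothetical.
public synchronized void wakeUp() {
  isAwake = true;
  wakeupTime = System.currentTimeMillis();
  notify(); // ends the current wait early so detector.update() runs promptly
}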
/** Reads the SensorList from mainScenario and populates the detectors hashmap. */
public void createDetectorListFromMainScenario() {
  String sensorIDString;
  for (int i = 0; i < this.mainScenario.getSensorList().getSensor().size(); i++) {
    Detector d = new Detector();
    // parameter at index 7 holds the sensor ID
    sensorIDString =
        this.mainScenario
            .getSensorList()
            .getSensor()
            .get(i)
            .getParameters()
            .getParameter()
            .get(7)
            .getValue();
    d.setSensorID(Integer.parseInt(sensorIDString));
    d.setSensorType(this.mainScenario.getSensorList().getSensor().get(i).getType());
    d.setSourceAddress("n/a");
    d.setLinkAssoc(
        Integer.parseInt(
            this.mainScenario.getSensorList().getSensor().get(i).getLinkReference().getId()));
    detectors.put(d.getSensorID(), d);
  }
}
@Override
public void renderAround(FocusedWindow g, Graphics gr, double deltaTime) {
  if (g instanceof LevelEditor) {
    super.renderAround(g, gr, deltaTime);
  }
}
// methods
public Detector calibrateParameters(Detector d) {
  if (d.getHealthStatus() != 100) {
    // assign NaN if detector is bad
    vf = Double.NaN;
    w = Double.NaN;
    q_max = Double.NaN;
  } else {
    // organize into an array of DataPoint
    ArrayList<DataPoint> datavec = new ArrayList<DataPoint>();
    int i;
    for (i = 0; i < d.getDensityData().size(); i++)
      datavec.add(
          new DataPoint(
              d.getDensityData().get(i), d.getFlowData().get(i), d.getSpeedData().get(i)));

    // maximum flow and its corresponding density
    DataPoint maxflw = new DataPoint(0, Double.NEGATIVE_INFINITY, 0);
    for (i = 0; i < d.getFlowData().size(); i++)
      if (datavec.get(i).flw > maxflw.flw) maxflw.setval(datavec.get(i));
    q_max = maxflw.flw;

    // split data into congested and freeflow regimes
    ArrayList<DataPoint> congestion = new ArrayList<DataPoint>(); // congestion states
    ArrayList<DataPoint> freeflow = new ArrayList<DataPoint>(); // freeflow states
    for (i = 0; i < d.getDensityData().size(); i++)
      if (datavec.get(i).dty >= maxflw.dty) congestion.add(datavec.get(i));
      else freeflow.add(datavec.get(i));

    // vf is the median freeflow speed
    vf = percentile("spd", freeflow, 0.5f);

    // compute critical density
    rho_crit = q_max / vf;

    // BINNING: collect data points above the critical density
    ArrayList<DataPoint> supercritical = new ArrayList<DataPoint>();
    for (i = 0; i < d.getDensityData().size(); i++)
      if (datavec.get(i).dty >= rho_crit) supercritical.add(datavec.get(i));

    // sort supercritical w.r.t. density
    Collections.sort(supercritical);
    int numsupercritical = supercritical.size();
    int Bin_width = 10;
    int step = Bin_width;
    ArrayList<DataPoint> BinData = new ArrayList<DataPoint>();
    for (i = 0; i < numsupercritical; i += Bin_width) {
      if (i + Bin_width >= numsupercritical) step = numsupercritical - i;
      if (step != 0) {
        List<DataPoint> Bin = supercritical.subList(i, i + step);
        if (!Bin.isEmpty()) {
          // robust upper envelope of the flow within the bin
          double a = 2.5f * percentile("flw", Bin, 0.75f) - 1.5f * percentile("flw", Bin, 0.25f);
          double b = percentile("flw", Bin, 1f);
          BinData.add(new DataPoint(percentile("dty", Bin, 0.5f), Math.min(a, b), Double.NaN));
        }
      }
    }

    // Do constrained LS to fit the slope of the congested branch
    ArrayList<Double> ai = new ArrayList<Double>();
    ArrayList<Double> bi = new ArrayList<Double>();
    for (i = 0; i < BinData.size(); i++) {
      bi.add(q_max - BinData.get(i).flw);
      ai.add(BinData.get(i).dty - rho_crit);
    }
    if (BinData.size() > 0) {
      double sumaibi = 0;
      double sumaiai = 0;
      for (i = 0; i < BinData.size(); i++) {
        sumaibi += ai.get(i) * bi.get(i);
        sumaiai += ai.get(i) * ai.get(i);
      }
      w = sumaibi / sumaiai;
      w = Math.max(w, w_min);
      w = Math.min(w, w_max);
    } else {
      w = Double.NaN;
    }
  }

  // store parameters in sensor
  d.setFdParams(new FDParameters()); // assigns nominal values
  d.getFdParams().setFD(vf, w, q_max); // assigns calculated values, keeps nominals if NaN
  return d;
}
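// Usage sketch (assumed, not part of the original code): calibrate every detector in the
// detectors hashmap used by the reader methods in this section, once its flow/speed/density
// data has been loaded. The class name FDCalibrator and the method name calibrateAll are
// hypothetical placeholders for whatever class owns calibrateParameters().
public void calibrateAll(FDCalibrator calibrator) {
  for (int key : detectors.keySet()) {
    detectors.put(key, calibrator.calibrateParameters(detectors.get(key)));
  }
}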
public void ParseInput(CmdVCInput in) {
  det.ParseInput(in.getInput());
  if (state != 0) reg.ParseInput(in);
}
/**
 * Construct a Detector instance with the given smoothing parameter.
 *
 * @param alpha smoothing parameter (default value = 0.5)
 * @return Detector instance
 * @throws LangDetectException
 */
public static Detector create(double alpha) throws LangDetectException {
  Detector detector = createDetector();
  detector.setAlpha(alpha);
  return detector;
}
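// Usage sketch, assuming this factory method belongs to a langdetect-style DetectorFactory
// whose language profiles have already been loaded; the wrapper method, the alpha value
// (the documented default of 0.5) and the sample call are illustrative assumptions.
public static String detectLanguage(String text) throws LangDetectException {
  Detector detector = create(0.5); // smoothing parameter alpha = 0.5
  detector.append(text);
  return detector.detect(); // most probable language code, e.g. "en"
}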
/**
 * Reads the detector data from a spreadsheet and writes it into the detectors hashmap. The files
 * should be in the following format and placed in the root directory of the imputer project
 * folder (for example, see detOutMainlines_431.csv):
 *
 * <p>1) 5 minute data granularity is assumed.
 *
 * <p>2) The data should be sorted by alphabetical order of detector IDs, and the data column
 * should be chronologically sorted for each detector.
 *
 * @throws IOException
 * @throws BiffException
 */
public void readDataIntoDetectorListFromSpreadSheet(String filename)
    throws BiffException, IOException {
  Workbook workbook = Workbook.getWorkbook(new File(filename));
  int rowIndex = 1; // row index into the sheet; the search below finds each detector's first row

  // Read absolute detector info and 5 minute data into the hashmap (some fields are not
  // important for fake detectors, left blank or 0 for the time being)
  for (int key : detectors.keySet()) {
    Detector d = detectors.get(key);
    // find the first row belonging to this detector ID
    while (true) {
      NumberCell nc2 = (NumberCell) workbook.getSheet(0).getCell(0, rowIndex);
      if (nc2.getValue() == key) {
        break;
      }
      rowIndex++;
    }
    NumberCell nc = (NumberCell) workbook.getSheet(0).getCell(4, rowIndex); // Postmile
    d.setAbsolutePM(nc.getValue());
    d.setDetectorLength(0.0);
    d.setDetectorName(workbook.getSheet(0).getCell(1, rowIndex).getContents()); // Name
    d.setFreewayDirection("");
    d.setFreewayNumber(0);
    d.setLatitude(0.0);
    d.setLongitude(0.0);
    NumberCell nc1 = (NumberCell) workbook.getSheet(0).getCell(13, rowIndex); // Number of Lanes
    Double temp = nc1.getValue();
    d.setNumberOfLanes(temp.intValue());
    for (int k = rowIndex; k < rowIndex + totalTimeInHours * 60 / 5; k++) {
      NumberCell ncSpeed = (NumberCell) workbook.getSheet(0).getCell(6, k); // Speed
      NumberCell ncFlow = (NumberCell) workbook.getSheet(0).getCell(5, k); // Flow
      d.addDatumToSpeed(ncSpeed.getValue());
      d.addDatumToFlow(ncFlow.getValue() / d.getNumberOfLanes());
      d.addDatumToDensity(ncFlow.getValue() / ncSpeed.getValue() / d.getNumberOfLanes());
    }
    nc = (NumberCell) workbook.getSheet(0).getCell(14, rowIndex); // Health
    if (nc.getValue() == 0) {
      d.setHealthStatus(100.0);
    } else {
      d.setHealthStatus(0.0);
    }
    // restart the search from the top for the next detector ID
    rowIndex = 1;
  }
}
/**
 * Reads the data from the database and writes it into the detectors hashmap.
 *
 * @throws SQLException
 */
public void readDataIntoDetectorListFromDatabase() throws SQLException {
  // TestConfiguration.dbSetup();
  PeMSStationAggregateReader stationAggregateReader =
      new PeMSStationAggregateReader(oraDatabase.doConnect());
  ArrayList<Long> vdsIDs = new ArrayList<Long>();
  for (int key : detectors.keySet()) {
    vdsIDs.add((long) key);
  }
  List<PeMSStationAggregate> stationsAggregate =
      stationAggregateReader.read(
          this.timeInterval, vdsIDs, PeMSAggregate.AggregationLevel.PEMS_5MIN);

  // Read absolute detector info into the hashmap
  VDSReader stationReader = new VDSReader(oraDatabase.doConnect());
  for (int key : detectors.keySet()) {
    VDS station = stationReader.read((long) key);
    Detector d = detectors.get(key);
    d.setAbsolutePM(station.getAbsolutePostmile());
    d.setDetectorLength(station.getDetectorLength());
    d.setDetectorName(station.getDetectorName());
    d.setFreewayDirection(station.getDirection());
    d.setFreewayNumber(station.getFreewayNum());
    d.setLatitude(station.getPosition().getPoint().get(0).getLat());
    d.setLongitude(station.getPosition().getPoint().get(0).getLng());
    d.setNumberOfLanes(station.getLaneCount());
  }

  // Read 5 minute data into the hashmap
  for (int i = 0; i < stationsAggregate.size(); i++) {
    // find the detector corresponding to the current ID in the data vector and fill the
    // fields accordingly
    Detector d = detectors.get((int) stationsAggregate.get(i).getVdsId());
    d.addDatumToSpeed(stationsAggregate.get(i).getTotal().getAvgSpeed());
    // to get the hourly rate at 5 minute granularity, multiply by 12
    d.addDatumToFlow(stationsAggregate.get(i).getTotal().getFlow() * 12 / d.getNumberOfLanes());
    d.addDatumToDensity(
        stationsAggregate.get(i).getTotal().getFlow()
            * 12
            / stationsAggregate.get(i).getTotal().getAvgSpeed()
            / d.getNumberOfLanes());
    if (i < detectors.size()) {
      d.setHealthStatus(stationsAggregate.get(i).getTotal().getObserved());
    }
  }
}
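// Worked example of the unit conversion used above (the helper and the numbers are
// illustrative): a count of 40 vehicles in a 5-minute interval corresponds to 40 * 12 = 480
// veh/hr, or 120 veh/hr/lane at a 4-lane station.
public static double perLaneHourlyFlow(double fiveMinuteFlow, int laneCount) {
  return fiveMinuteFlow * 12.0 / laneCount; // 12 five-minute intervals per hour
}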