/**
 * Retrieves the access token used for interacting with Twitter.
 *
 * @return the stored access token, or null if none has been saved yet
 */
public static AccessToken getAccessToken() {
  // Try to grab the token from the cache.
  StoredAccessToken storedToken = (StoredAccessToken) cache.get(ACCESS_TOKEN_KEY);
  if (storedToken != null) {
    return storedToken.getAccessToken();
  }
  // Now try to grab it from the datastore.
  PersistenceManager pm = PMF.get().getPersistenceManager();
  Extent<StoredAccessToken> extent = pm.getExtent(StoredAccessToken.class, false);
  try {
    Iterator<StoredAccessToken> itr = extent.iterator();
    // Guard with hasNext(): next() on an empty extent throws
    // NoSuchElementException rather than returning null.
    if (itr.hasNext()) {
      storedToken = itr.next();
      // The token was found, so cache it and return.
      cache.put(ACCESS_TOKEN_KEY, storedToken);
      return storedToken.getAccessToken();
    }
  } finally {
    extent.closeAll();
    pm.close();
  }
  return null;
}
@SuppressWarnings("unchecked") public static synchronized List<Key<Line>> getTrainKeys() { String functionName = "getTrainKeys()"; if (trainKeys == null || trainKeys.size() == 0) { Objectify ofy = ObjectifyService.begin(); Query<Line> q = ofy.query(Line.class).filter("type", 21); List<Key<Line>> keys; try { Cache cache = CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap()); keys = (List<Key<Line>>) cache.get(q.toString()); if (keys == null) { keys = q.listKeys(); cache.put(q.toString(), keys); } } catch (CacheException e) { keys = q.listKeys(); Logger.getLogger(location).log(Level.SEVERE, functionName + ": Cache error: " + e); e.printStackTrace(); } trainKeys = keys; Logger.getLogger(location) .log(Level.INFO, functionName + ": served new trainKeys. #" + trainKeys.size()); } return trainKeys; }
public int addAndCheckSearchForIp(String ip, int mode) {
  String functionName = "addAndCheckSearchForIp()";
  Cache cache;
  Map<String, Integer> props = new HashMap<String, Integer>();
  props.put(GCacheFactory.EXPIRATION_DELTA, 600); // 10 minutes
  if (Utils.isUserInSpecialACL()) {
    // The user identifier in this log message is redacted in the source.
    Logger.getLogger(location)
        .log(
            Level.INFO,
            functionName + ": No limit for user: ****** (" + Utils.getUser().getNickname() + ")");
    return 1;
  }
  try {
    String key = "requestCounter:" + mode + ":" + ip;
    CacheFactory cacheFactory = CacheManager.getInstance().getCacheFactory();
    cache = cacheFactory.createCache(props);
    Integer counter = 1; // autoboxing instead of the deprecated new Integer(1)
    Integer o = (Integer) cache.get(key);
    if (o != null) {
      counter = counter + o;
    }
    cache.put(key, counter);
    return counter;
  } catch (CacheException e) {
    Logger.getLogger(location).log(Level.SEVERE, functionName + ": caching error: " + e);
    return -1;
  }
}
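// Usage sketch for the counter above (not part of the original code): callers
// compare the returned count against a limit of their choosing. The constant
// MAX_SEARCHES_PER_WINDOW and the method name isSearchAllowed are illustrative
// assumptions. -1 (cache failure) is treated as "allow" so that a memcache
// outage does not block all searches.
private static final int MAX_SEARCHES_PER_WINDOW = 30; // assumed limit per 10-minute window

public boolean isSearchAllowed(String ip, int mode) {
  int count = addAndCheckSearchForIp(ip, mode);
  return count == -1 || count <= MAX_SEARCHES_PER_WINDOW;
}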
@Override
public Datastore.Stats getStats(boolean useCache) {
  if (useCache) {
    try {
      Stats cachedStats = (Stats) STATS_CACHE.get(STATS_CACHE_KEY);
      if (cachedStats != null) {
        return cachedStats;
      }
      logger.info("Stats not in cache, re-computing");
    } catch (InvalidValueException err) {
      logger.log(Level.WARNING, "Could not load data from memcache", err);
    }
  }
  Stats ret = new Stats();
  DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
  PreparedQuery pq =
      datastore.prepare(new com.google.appengine.api.datastore.Query("__Stat_Kind__"));
  for (Entity kindStat : pq.asIterable()) {
    String kind = (String) kindStat.getProperty("kind_name");
    if ("Channel".equals(kind)) {
      ret.numChannels = ((Long) kindStat.getProperty("count")).intValue();
      ret.timestamp = (Date) kindStat.getProperty("timestamp");
    }
  }
  ret.numUsers = countUsersActiveInLastNDays(datastore, -1);
  ret.oneDayActiveUsers = countUsersActiveInLastNDays(datastore, 1);
  ret.sevenDayActiveUsers = countUsersActiveInLastNDays(datastore, 7);
  ret.thirtyDayActiveUsers = countUsersActiveInLastNDays(datastore, 30);
  STATS_CACHE.put(STATS_CACHE_KEY, ret);
  return ret;
}
protected static StorageStats getStorageStats() {
  // Try to grab the stats from the cache.
  StorageStats stats = (StorageStats) cache.get(STORAGE_STATS_KEY);
  if (stats != null) {
    return stats;
  }
  // Now try to grab them from the datastore.
  PersistenceManager pm = PMF.get().getPersistenceManager();
  Extent<StorageStats> extent = pm.getExtent(StorageStats.class, false);
  try {
    Iterator<StorageStats> itr = extent.iterator();
    if (itr.hasNext()) {
      stats = itr.next();
    } else {
      stats = new StorageStats();
      pm.makePersistent(stats);
    }
    return stats;
  } finally {
    extent.closeAll();
    pm.close();
    // Guard against caching null if the iteration above threw.
    if (stats != null) {
      cache.put(STORAGE_STATS_KEY, stats);
    }
  }
}
public PQA getPQA(String geoCell, Objectify ofy) {
  String shortened = geoCell.substring(0, geoCell.length() - 2);
  if (mapBusCache.containsKey(shortened)) {
    // Served from the in-memory map cache.
    return mapBusCache.get(shortened).get(geoCell);
  } else {
    try {
      Cache cache = CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap());
      @SuppressWarnings("unchecked")
      HashMap<String, PQA> list = (HashMap<String, PQA>) cache.get("PQAsCache" + shortened);
      if (list == null) {
        list = generatePQAsCombination(shortened, ofy);
        cache.put("PQAsCache" + shortened, list);
      }
      mapBusCache.put(shortened, list);
      return list.get(geoCell);
    } catch (CacheException e) {
      Logger.getLogger(location).log(Level.INFO, "getPQA(): CacheException: " + e);
      HashMap<String, PQA> list = generatePQAsCombination(shortened, ofy);
      mapBusCache.put(shortened, list);
      return list.get(geoCell);
    }
  }
}
@SuppressWarnings("unchecked") public Collection<Point> getSearchPointsForLine( Key<Line> l, int middleIndex, int plusMinusIndex, Objectify ofy) { String functionName = "getSearchPointsForLine(int plusMinusIndex)"; List<Key<Point>> keys; plusMinusIndex++; // query is exclusive, therefore we expand by one to return the expected // number of results Query<Point> q = ofy.query(Point.class) .ancestor(l) .filter("ignore", false) .filter("index <", middleIndex + plusMinusIndex) .filter("index >", middleIndex - plusMinusIndex); try { Cache cache = CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap()); keys = (List<Key<Point>>) cache.get(q.toString()); if (keys == null) { keys = q.listKeys(); cache.put(q.toString(), keys); } } catch (CacheException e) { keys = q.listKeys(); Logger.getLogger(location).log(Level.SEVERE, functionName + ": Cache error: " + e); e.printStackTrace(); } Map<Key<Point>, Point> points = ofy.get(keys); return points.values(); }
@SuppressWarnings("unchecked") public Collection<Point> getSearchPointsForLine( Key<Line> l, String geoCell, int plusMinusIndex, Objectify ofy) { String functionName = "getSearchPointsForLine(String geoCell)"; Query<Point> q = ofy.query(Point.class) .ancestor(l) .filter("ignore", false) .filter("defaultGeoCell", geoCell) .limit(1); Key<Point> kMiddle; try { Cache cache = CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap()); kMiddle = (Key<Point>) cache.get(q.toString()); if (kMiddle == null) { kMiddle = q.getKey(); cache.put(q.toString(), kMiddle); } } catch (CacheException e) { kMiddle = q.getKey(); Logger.getLogger(location).log(Level.SEVERE, functionName + ": Cache error: " + e); e.printStackTrace(); } if (kMiddle == null) { // dirty hack, but still better than failing in case that no point was found q = ofy.query(Point.class).ancestor(l).filter("defaultGeoCell", geoCell).limit(1); kMiddle = q.getKey(); System.err.println( "had to resort to dirty hack and retrieve a point that is set to ignore for line " + l + " in cell " + geoCell + ". Result: " + kMiddle); } try { Point middle = ofy.get(kMiddle); Collection<Point> points; if (plusMinusIndex > 0) { points = getSearchPointsForLine(l, middle.getIndex(), plusMinusIndex, ofy); } else { points = new LinkedList<Point>(); points.add(middle); } return points; } catch (NullPointerException e) { q = ofy.query(Point.class).ancestor(l).filter("ignore", false); System.err.println( "because of " + e + " had to resort to even dirtier hack and retrieve all potential points for line " + l + ", even those not in cell " + geoCell); return q.list(); } }
public static String getUpdatesSince(long sinceId) {
  // First try to pull the response from the cache.
  @SuppressWarnings("unchecked")
  Map<Long, String> cachedQueries = (Map<Long, String>) cache.get(CACHED_QUERIES_KEY);
  Long sinceIdObj = Long.valueOf(sinceId);
  if (cachedQueries == null) {
    cachedQueries = new HashMap<Long, String>();
  } else if (cachedQueries.containsKey(sinceIdObj)) {
    log.info("Found query in the cache: " + sinceId);
    return cachedQueries.get(sinceIdObj);
  }
  // If we haven't cached this response, we must query for it.
  PersistenceManager pm = PMF.get().getPersistenceManager();
  JSONArray resultArray = new JSONArray();
  try {
    Query query = pm.newQuery(TrainUpdate.class);
    query.setOrdering("twitterId ASC");
    if (sinceId >= 0) {
      query.setFilter("twitterId > " + sinceId);
    }
    @SuppressWarnings("unchecked")
    List<TrainUpdate> updates = (List<TrainUpdate>) query.execute();
    for (TrainUpdate update : updates) {
      JSONObject updateJson = update.getJSON();
      resultArray.put(updateJson);
    }
    // Append any updates that are stored in the cache to this result.
    @SuppressWarnings("unchecked")
    List<TrainUpdate> cachedUpdates = (List<TrainUpdate>) cache.get(CACHED_UPDATES_KEY);
    if (cachedUpdates != null) {
      log.info("Fetched cache with size of: " + cachedUpdates.size());
      for (TrainUpdate update : cachedUpdates) {
        if (update.getTwitterId() > sinceId) {
          JSONObject updateJson = update.getJSON();
          resultArray.put(updateJson);
        }
      }
    }
  } finally {
    pm.close();
  }
  // Finally, cache the response.
  String result = resultArray.toString();
  cachedQueries.put(sinceIdObj, result);
  cache.put(CACHED_QUERIES_KEY, cachedQueries);
  return result;
}
private void removeFromCache(Object key) {
  try {
    Cache cache = CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap());
    cache.remove(key);
  } catch (CacheException e) {
    Logger.getLogger(location).log(Level.SEVERE, "removeFromCache(): Cache error: " + e);
    e.printStackTrace();
  }
}
public CustomCacheManager() {
  try {
    cache = CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap());
    cache.clear();
  } catch (CacheException e) {
    // Chain the cause so the original failure is not lost.
    throw new RuntimeException("Could not obtain the cache from the CacheManager", e);
  }
}
@SuppressWarnings("unchecked") public Collection<UserFavouritePosition> getUserFavouritePositions(User user, Objectify ofy) { String functionName = "getUserFavouritePositions()"; Query<UserFavouritePosition> q = ofy.query(UserFavouritePosition.class).filter("user", user); List<Key<UserFavouritePosition>> keys; try { Cache cache = CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap()); keys = (List<Key<UserFavouritePosition>>) cache.get(q.toString()); if (keys == null) { keys = q.listKeys(); cache.put(q.toString(), keys); } } catch (CacheException e) { Logger.getLogger(location).log(Level.SEVERE, functionName + ": caching error: " + e); keys = q.listKeys(); } return ofy.get(keys).values(); }
@SuppressWarnings("unchecked") public static synchronized HashMap<String, Set<TrainNode>> getTrainNodes() { String functionName = "getTrainNodes()"; if (trainNodes == null || trainNodes.size() == 0) { trainNodes = new HashMap<String, Set<TrainNode>>(); Objectify ofy = ObjectifyService.begin(); Query<TrainNode> q = ofy.query(TrainNode.class); List<Key<TrainNode>> keys; try { Cache cache = CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap()); keys = (List<Key<TrainNode>>) cache.get(q.toString()); if (keys == null) { keys = q.listKeys(); cache.put(q.toString(), keys); } } catch (CacheException e) { keys = q.listKeys(); Logger.getLogger(location).log(Level.SEVERE, functionName + ": Cache error: " + e); e.printStackTrace(); } Map<Key<TrainNode>, TrainNode> res = ofy.get(keys); Collection<TrainNode> tns = res.values(); Logger.getLogger(location) .log( Level.INFO, functionName + ": Got " + res.size() + " TrainNodes. keys.size(): " + keys.size()); // String m = ""; for (TrainNode tn : tns) { if (!trainNodes.containsKey(tn.getGeoCell())) { trainNodes.put(tn.getGeoCell(), new HashSet<TrainNode>()); } trainNodes.get(tn.getGeoCell()).add(tn); /* if(tn.getLineKey().equals(new Key<Line>(Line.class, 155))) { // if(tn.getLineType() == 11) { System.err.print("\"" + tn.getGeoCell() + "\", "); }*/ } // Utils.eMailGeneric(m, "DaoTemp"); } return trainNodes; }
/*
 * Returns the points of a line that are from the original import, without the
 * intermediate points that are created for searches.
 * Caches the keys of the query result so that the entities can be read through
 * a batch get (which checks memcache first). This caching should be evaluated
 * and removed if it does not work out.
 */
@SuppressWarnings("unchecked")
public Collection<Point> getPointsToDisplayForLine(Line l, Objectify ofy) {
  String functionName = "getPointsToDisplayForLine()";
  List<Key<Point>> keys;
  Query<Point> q =
      ofy.query(Point.class).ancestor(l).filter("forSearchOnly", false).order("index");
  try {
    Cache cache = CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap());
    keys = (List<Key<Point>>) cache.get(q.toString());
    if (keys == null) {
      keys = q.listKeys();
      cache.put(q.toString(), keys);
    }
  } catch (CacheException e) {
    keys = q.listKeys();
    Logger.getLogger(location).log(Level.SEVERE, functionName + ": Cache error: " + e);
    e.printStackTrace();
  }
  Map<Key<Point>, Point> points = ofy.get(keys);
  return points.values();
}
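// The try/catch caching boilerplate above also appears in getTrainKeys(),
// getSearchPointsForLine(), getUserFavouritePositions() and getTrainNodes().
// A minimal sketch of a shared helper, assuming the same JCache setup as the
// surrounding code; the name cachedListKeys is an illustrative assumption,
// not an existing method in this codebase.
@SuppressWarnings("unchecked")
private static <T> List<Key<T>> cachedListKeys(Query<T> q, String callerName) {
  try {
    Cache cache = CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap());
    List<Key<T>> keys = (List<Key<T>>) cache.get(q.toString());
    if (keys == null) {
      keys = q.listKeys();
      cache.put(q.toString(), keys); // keyed by the query's string form, as above
    }
    return keys;
  } catch (CacheException e) {
    Logger.getLogger(location).log(Level.SEVERE, callerName + ": Cache error: " + e);
    return q.listKeys(); // fall back to the datastore on cache failure
  }
}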
/**
 * Adds new updates to storage.
 *
 * @param newUpdates the updates to persist; the first element's Twitter ID is
 *     recorded as the latest update ID
 */
public static void addUpdates(List<TrainUpdate> newUpdates) {
  if (newUpdates.isEmpty()) {
    return;
  }
  // Add the new updates to the list of cached results.
  @SuppressWarnings("unchecked")
  List<TrainUpdate> cachedUpdates = (List<TrainUpdate>) cache.get(CACHED_UPDATES_KEY);
  if (cachedUpdates == null) {
    cachedUpdates = new ArrayList<TrainUpdate>();
  }
  log.info("Fetched cache with size of: " + cachedUpdates.size());
  cachedUpdates.addAll(newUpdates);
  // Update the storage stats.
  StorageStats stats = getStorageStats();
  stats.setLatestUpdateId(newUpdates.get(0).getTwitterId());
  // Attempt to persist the updates to the datastore.
  PersistenceManager pm = PMF.get().getPersistenceManager();
  try {
    pm.makePersistentAll(cachedUpdates);
    pm.makePersistent(stats);
    cachedUpdates.clear();
    log.info("Cache cleared");
  } catch (DatastoreTimeoutException ex) {
    log.info("Couldn't write to datastore, caching instead");
  } finally {
    // No matter what happens, store the new cached results.
    cache.remove(CACHED_QUERIES_KEY);
    cache.put(CACHED_UPDATES_KEY, cachedUpdates);
    cache.put(STORAGE_STATS_KEY, stats);
    log.info("Updated cache to size of: " + cachedUpdates.size());
    pm.close();
  }
}
@Override
public void run() {
  Cache cache = CacheManager.getInstance().getCache(cacheName);
  Random random = new Random();
  try {
    while (true) {
      Thread.sleep((long) (random.nextFloat() * 5));
      long maxKey = ObjectFactory.getMaxKey(cacheName);
      long key = (long) (maxKey * random.nextFloat());
      Object value = cache.get(String.valueOf(key));
      if (value != null) {
        if (!value.getClass().getName().equals(cacheName)) {
          // The original created this exception without throwing it, which
          // silently swallowed the failed check.
          throw new RuntimeException("Wrong object in cache");
        }
      }
    }
  } catch (InterruptedException e) {
    // Restore the interrupt flag so callers can observe the interruption.
    Thread.currentThread().interrupt();
  }
}
/**
 * Gets the ID of the last pulled train update.
 *
 * @return the ID, or -1 if there aren't any stored IDs
 */
public static long getLatestUpdateId() {
  StorageStats stats = getStorageStats();
  if (stats != null) {
    return stats.getLatestUpdateId();
  }
  // TODO: will stats ever be null?
  // TODO: We should store the latest ID instead of querying for it.
  // That way we don't run into a situation where the datastore gets cleared
  // and we end up pulling 200 updates.
  // Attempt to just pull the value out of the cache.
  // We assume that the latest updates will always live here.
  @SuppressWarnings("unchecked")
  List<TrainUpdate> cachedUpdates = (List<TrainUpdate>) cache.get(CACHED_UPDATES_KEY);
  if (cachedUpdates != null && !cachedUpdates.isEmpty()) {
    log.info("Fetched cache with size of: " + cachedUpdates.size());
    return cachedUpdates.get(cachedUpdates.size() - 1).getTwitterId();
  }
  // If no cached updates were found, we must query the datastore.
  long sinceId = -1;
  PersistenceManager pm = PMF.get().getPersistenceManager();
  try {
    Query query = pm.newQuery(TrainUpdate.class);
    query.setOrdering("date DESC");
    query.setRange(0, 1);
    @SuppressWarnings("unchecked")
    List<TrainUpdate> oldUpdates = (List<TrainUpdate>) query.execute();
    if (oldUpdates.size() > 0) {
      sinceId = oldUpdates.get(0).getTwitterId();
    }
  } finally {
    pm.close();
  }
  return sinceId;
}
/**
 * Saves the access token required by Twitter.
 *
 * @param accessToken the token to store
 */
public static void setAccessToken(AccessToken accessToken) {
  StoredAccessToken storedToken = new StoredAccessToken(accessToken);
  // Add the token to the cache.
  cache.put(ACCESS_TOKEN_KEY, storedToken);
  // Now persist it to the datastore, replacing any previously stored token.
  PersistenceManager pm = PMF.get().getPersistenceManager();
  Query delQuery = pm.newQuery(StoredAccessToken.class);
  try {
    delQuery.deletePersistentAll();
    pm.makePersistent(storedToken);
  } finally {
    delQuery.closeAll();
    pm.close();
  }
}
public ConnectionProxy indirectSearch(
    GeoPt start, GeoPt dest, Set<Key<Line>> tabuTrainsSet, Set<Key<Line>> mlkSet, Objectify ofy) {
  final String functionName = "indirectSearch()";
  final int plusMinus = 8;
  final int maxAlternatives = 8;
  HashSet<Key<PQA>> PQsForAStar = new HashSet<Key<PQA>>();
  Logger.getLogger(location)
      .log(Level.INFO, functionName + ": fetching PQAs for " + mlkSet.size() + " lines.");
  try {
    Cache cache = CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap());
    for (Key<Line> k : mlkSet) {
      Query<PQA> q = ofy.query(PQA.class).filter("lineKeys", k);
      @SuppressWarnings("unchecked")
      List<Key<PQA>> rl = (List<Key<PQA>>) cache.get(q.toString());
      if (rl == null) {
        rl = q.listKeys();
        cache.put(q.toString(), rl);
      }
      PQsForAStar.addAll(rl);
    }
  } catch (CacheException e) {
    Logger.getLogger(location).log(Level.SEVERE, functionName + ": Cache error: " + e);
    e.printStackTrace();
  }
  Logger.getLogger(location)
      .log(Level.FINER, functionName + ": Got " + PQsForAStar.size() + " PQs.");
  HashSet<String> setBusKey = new HashSet<String>();
  for (Key<PQA> pq : PQsForAStar) {
    setBusKey.add(pq.getName());
  }
  WayHolder wh =
      new AStarImpl()
          .aStarSearch(
              Utils.computeGeoCell(
                  new com.beoui.geocell.model.Point(start.getLatitude(), start.getLongitude())),
              Utils.computeGeoCell(
                  new com.beoui.geocell.model.Point(dest.getLatitude(), dest.getLongitude())),
              setBusKey,
              mlkSet,
              tabuTrainsSet,
              ofy);
  // The first cell is taken from the start-cell list and the dest-cell list
  // respectively; this still needs rework.
  if (wh != null && wh.getWay().size() > 1) {
    List<AStarNode> way = wh.getWay();
    List<AStarNode> umsteigen = wh.getCombinationPoints(); // transfer points
    int index = 0;
    if (way.get(0).getOwningLine() == null) {
      index = 1;
    }
    Collection<Point> pointsStart;
    if (way.get(index).getClass() == AStarNodeImpl.class) {
      pointsStart =
          getSearchPointsForLine(
              way.get(index).getOwningLine(), way.get(index).getPointGeoCell(), plusMinus, ofy);
    } else {
      Line tempLine = getLineByKey(way.get(index).getOwningLine(), ofy);
      if (tempLine.getType() == 11 || tempLine.getType() == 13 || tempLine.getType() == 15) {
        pointsStart =
            getSearchPointsForLine(
                way.get(index).getOwningLine(), way.get(index).getPointGeoCell(), 1, ofy);
      } else {
        pointsStart =
            getSearchPointsForLine(
                way.get(index).getOwningLine(), way.get(index).getPointGeoCell(), 0, ofy);
      }
    }
    Point startPoint = Utils.closestPoint(start, pointsStart);
    List<LineProxy> lineProxies = new LinkedList<LineProxy>();
    LineProxy walk =
        Utils.walk(new Point(null, start.getLatitude(), start.getLongitude(), null), startPoint);
    lineProxies.add(walk);
    Point lastPoint = startPoint;
    AStarNode lastNode = way.get(index);
    for (int i = index; i < way.size(); i++) {
      AStarNode an = way.get(i);
      if (umsteigen.contains(an)) {
        Collection<Point> pointsLine1;
        if (an.getClass() == AStarNodeImpl.class) {
          pointsLine1 =
              getSearchPointsForLine(lastPoint.getOwner(), an.getPointGeoCell(), plusMinus, ofy);
        } else {
          Line tempLine = getLineByKey(an.getOwningLine(), ofy);
          if (tempLine.getType() == 11 || tempLine.getType() == 13 || tempLine.getType() == 15) {
            pointsLine1 = getSearchPointsForLine(an.getOwningLine(), an.getPointGeoCell(), 1, ofy);
          } else {
            pointsLine1 = getSearchPointsForLine(an.getOwningLine(), an.getPointGeoCell(), 0, ofy);
          }
        }
        AStarNode next = way.get(i + 1);
        Collection<Point> pointsLine2;
        if (next.getClass() == AStarNodeImpl.class) {
          pointsLine2 =
              getSearchPointsForLine(next.getOwningLine(), next.getPointGeoCell(), plusMinus, ofy);
        } else {
          Line tempLine = getLineByKey(next.getOwningLine(), ofy);
          if (tempLine.getType() == 11 || tempLine.getType() == 13 || tempLine.getType() == 15) {
            pointsLine2 =
                getSearchPointsForLine(next.getOwningLine(), next.getPointGeoCell(), 1, ofy);
          } else {
            pointsLine2 =
                getSearchPointsForLine(next.getOwningLine(), next.getPointGeoCell(), 0, ofy);
          }
        }
        Logger.getLogger(location)
            .log(
                Level.FINE,
                functionName + ": Transfer from " + an.getOwningLine() + " to "
                    + next.getOwningLine() + " at " + an.getPointGeoCell() + " and "
                    + next.getPointGeoCell() + ". Results: " + pointsLine1.size() + " and "
                    + pointsLine2.size());
        // For every point (within the computed cells) of the inner line, compute
        // the distance to every point of the outer line and keep the closest pair.
        Iterator<Point> j1 = pointsLine1.iterator();
        double min_distance = 999999999.9;
        Tuple<Point, Point> tuple_min = null;
        while (j1.hasNext()) {
          Point outerPoint = j1.next();
          Iterator<Point> j2 = pointsLine2.iterator();
          while (j2.hasNext()) {
            Point innerPoint = j2.next();
            double distance = Utils.distanceApprox(innerPoint, outerPoint);
            // Strongly penalise points flagged as ignore. Related to the dirty
            // hack that retrieves any point if none were found that are not
            // "ignore"-flagged. Theoretically there shouldn't be any case where
            // this is necessary, but right now there is (rarely).
            if (innerPoint.isIgnore() || outerPoint.isIgnore()) {
              distance += 10000;
            }
            if (distance < min_distance) {
              min_distance = distance;
              tuple_min = new Tuple<Point, Point>(outerPoint, innerPoint);
            }
          }
        }
        LineProxy c = Utils.getConnection(lastPoint, tuple_min.getFirst(), ofy);
        lineProxies.add(c);
        Line lastPointOwner1 = ofy.get(lastPoint.getOwner());
        if (lastPointOwner1.getType() == 1) {
          // If it is a bus, look for alternatives.
          List<Key<Line>> alternativesK = new LinkedList<Key<Line>>();
          PlanQuadrat pq1 = Dao.getInstance().getPlanQuadrat(lastPoint.getDefaultGeoCell(), ofy);
          PlanQuadrat pq2 =
              Dao.getInstance().getPlanQuadrat(tuple_min.getFirst().getDefaultGeoCell(), ofy);
          int counter = 0;
          for (Key<Line> k1 : pq1.getDirectLineKeys()) {
            if (k1.getId() != lastPoint.getOwner().getId() // not the bus already being taken
                && pq2.getDirectLineKeys().contains(k1) // contained in both start and end PQ
                && pq1.getIndices().get(pq1.getDirectLineKeys().indexOf(k1))
                    <= pq2.getIndices().get(pq2.getDirectLineKeys().indexOf(k1))
                && counter < maxAlternatives) {
              alternativesK.add(k1);
              counter++;
            }
          }
          for (Key<Line> k : alternativesK) {
            Line l = getLineByKey(k, ofy);
            c.addAlternativeLine(l.getLinenum() + " " + l.getRamal());
          }
        } else if (lastPointOwner1.getType() == 21) {
          // Look for alternatives for trains as well.
          List<Key<Line>> alternativesK = new LinkedList<Key<Line>>();
          Set<TrainNode> tns1 = Dao.getTrainNodes().get(lastNode.getGeoCell());
          Set<TrainNode> tns2 = Dao.getTrainNodes().get(an.getGeoCell());
          for (TrainNode tn1 : tns1) {
            if (!tn1.getLineKey().equals(lastPoint.getOwner())) { // not the train already taken
              for (TrainNode tn2 : tns2) {
                if (tn1.getLineKey().equals(tn2.getLineKey()) // belong to the same train
                    && tn1.getIndex() < tn2.getIndex()) { // and the index increases
                  alternativesK.add(tn1.getLineKey());
                }
              }
            }
          }
          for (Key<Line> k : alternativesK) {
            Line l = getLineByKey(k, ofy);
            if (!l.getRamal().equals(c.getRamal())) {
              c.addAlternativeLine("Ramal a " + l.getRamal());
            }
          }
        }
        walk = Utils.walk(tuple_min.getFirst(), tuple_min.getSecond());
        lineProxies.add(walk);
        lastPoint = tuple_min.getSecond();
        lastNode = next;
      }
    }
    index = way.size() - 1;
    if (way.get(index).getOwningLine() == null) {
      index = way.size() - 2;
    }
    Collection<Point> pointsDest;
    if (way.get(index).getClass() == AStarNodeImpl.class) {
      pointsDest =
          getSearchPointsForLine(
              way.get(index).getOwningLine(), way.get(index).getPointGeoCell(), plusMinus, ofy);
    } else {
      Line tempLine = getLineByKey(way.get(index).getOwningLine(), ofy);
      if (tempLine.getType() == 11 || tempLine.getType() == 13 || tempLine.getType() == 15) {
        pointsDest =
            getSearchPointsForLine(
                way.get(index).getOwningLine(), way.get(index).getPointGeoCell(), 1, ofy);
      } else {
        pointsDest =
            getSearchPointsForLine(
                way.get(index).getOwningLine(), way.get(index).getPointGeoCell(), 0, ofy);
      }
    }
    Point destPoint = Utils.closestPoint(dest, pointsDest);
    LineProxy c = Utils.getConnection(lastPoint, destPoint, ofy);
    lineProxies.add(c);
    Line lastPointOwner2 = ofy.get(lastPoint.getOwner());
    if (lastPointOwner2.getType() == 1) {
      // If it is a bus, look for alternatives.
      List<Key<Line>> alternativesK = new LinkedList<Key<Line>>();
      PlanQuadrat pq1 = Dao.getInstance().getPlanQuadrat(lastPoint.getDefaultGeoCell(), ofy);
      PlanQuadrat pq2 = Dao.getInstance().getPlanQuadrat(destPoint.getDefaultGeoCell(), ofy);
      int counter = 0;
      for (Key<Line> k1 : pq1.getDirectLineKeys()) {
        if (k1.getId() != lastPoint.getOwner().getId() // not the bus already being taken
            && pq2.getDirectLineKeys().contains(k1) // contained in both start and end PQ
            && pq1.getIndices().get(pq1.getDirectLineKeys().indexOf(k1))
                <= pq2.getIndices().get(pq2.getDirectLineKeys().indexOf(k1))
            && counter < maxAlternatives) {
          alternativesK.add(k1);
          counter++;
        }
      }
      for (Key<Line> k : alternativesK) {
        Line l = getLineByKey(k, ofy);
        c.addAlternativeLine(l.getLinenum() + " " + l.getRamal());
      }
    } else if (lastPointOwner2.getType() == 21) {
      // Look for alternatives for trains as well.
      List<Key<Line>> alternativesK = new LinkedList<Key<Line>>();
      Set<TrainNode> tns1 = Dao.getTrainNodes().get(lastNode.getGeoCell());
      Set<TrainNode> tns2 = Dao.getTrainNodes().get(way.get(index).getGeoCell());
      for (TrainNode tn1 : tns1) {
        if (!tn1.getLineKey().equals(lastPoint.getOwner())) { // not the train already taken
          for (TrainNode tn2 : tns2) {
            if (tn1.getLineKey().equals(tn2.getLineKey()) // belong to the same train
                && tn1.getIndex() < tn2.getIndex()) { // and the index increases
              alternativesK.add(tn1.getLineKey());
            }
          }
        }
      }
      for (Key<Line> k : alternativesK) {
        Line l = getLineByKey(k, ofy);
        if (!l.getRamal().equals(c.getRamal())) {
          c.addAlternativeLine("Ramal " + l.getRamal());
        }
      }
    }
    walk = Utils.walk(destPoint, new Point(null, dest.getLatitude(), dest.getLongitude(), null));
    lineProxies.add(walk);
    ConnectionProxy cp = new ConnectionProxy(lineProxies);
    return cp;
  } else {
    Logger.getLogger(location).log(Level.INFO, functionName + ": no indirect connection found.");
    return null;
  }
}
public static void clearCache() { cache.clear(); }
@SuppressWarnings("unchecked") public List<ArticleLink> getRecentArticlesUrl() { return (List<ArticleLink>) cache.get(RECENT_ARTICLE_KEY); }
public void setRecentArticlesUrl(List<ArticleLink> urls) { cache.put(RECENT_ARTICLE_KEY, urls); }
@SuppressWarnings("unchecked") public List<ArticleLink> getPopularArticlesUrl() { return (List<ArticleLink>) cache.get(POPULAR_ARTICLE_KEY); }
public void setRelativeUrls(List<ArticleLink> urls) { cache.put(RELATIVE_URL_KEY, urls); }
/**
 * Sets the Cache instance used by caching repositories. By registering it with the snapshotter
 * trigger, the trigger can optimize memory usage by clearing the counters held for aggregates
 * that are contained in caches. When an aggregate is evicted or deleted from the cache, its
 * event counter is removed from the trigger.
 *
 * <p>Use the {@link #setAggregateCaches(java.util.List)} method if you have configured different
 * caches for different repositories.
 *
 * <p>Using this method will automatically set {@link #setClearCountersAfterAppend(boolean)} to
 * <code>false</code>.
 *
 * @param cache The cache used by caching repositories
 * @see #setAggregateCaches(java.util.List)
 */
public void setAggregateCache(Cache cache) {
  this.clearCountersAfterAppend = false;
  cache.addListener(new CacheListener());
}
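// Usage sketch (illustrative, not from the original source): the key point is
// to pass the same Cache instance the repository itself uses, so cache
// evictions also drop the trigger's event counters. The names snapshotterTrigger,
// cachingRepository, createCache() and setCache() are assumptions for this sketch.
Cache sharedCache = createCache();               // however the application builds its cache
cachingRepository.setCache(sharedCache);         // hypothetical repository setter
snapshotterTrigger.setAggregateCache(sharedCache);
// From here on, evicting an aggregate from sharedCache also removes its event
// counter from the trigger, and clearCountersAfterAppend is implicitly false.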
public void expireArticlesUrl() {
  cache.remove(RECENT_ARTICLE_KEY);
  cache.remove(POPULAR_ARTICLE_KEY);
  cache.remove(RELATIVE_URL_KEY);
}

public void setPopularArticlesUrl(List<ArticleLink> urls) {
  cache.put(POPULAR_ARTICLE_KEY, urls);
}

@SuppressWarnings("unchecked")
public List<ArticleLink> getRelativeUrls() {
  return (List<ArticleLink>) cache.get(RELATIVE_URL_KEY);
}