@BeforeClass
public static void setUpClass() {
    Configuration defaultConfiguration = new Configuration();
    defaultConfiguration.setName("TestCacheManager");
    defaultConfiguration.setDefaultCacheConfiguration(
            new CacheConfiguration().diskPersistent(false));
    defaultConfiguration.setUpdateCheck(false);
    cacheManager = CacheManager.newInstance(defaultConfiguration);
}
@BeforeClass
public static void setUpClass() {
    Configuration configuration = new Configuration();
    CacheConfiguration cacheConfiguration = new CacheConfiguration();
    cacheConfiguration.setMaxEntriesLocalHeap(100);
    configuration.addDefaultCache(cacheConfiguration);
    _cacheManager = CacheManager.newInstance(configuration);
}
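Both fixtures leave the CacheManager running after the test class finishes. A matching teardown, shown here as a minimal sketch assuming JUnit 4 and the same static cacheManager field, would release it so repeated runs do not collide with an already-registered manager of the same name:

@AfterClass
public static void tearDownClass() {
    // Shut the manager down so the next test class can create a fresh instance.
    if (cacheManager != null) {
        cacheManager.shutdown();
    }
}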
/**
 * @throws IOException
 */
@Test
public void customCachedVariableFragment() throws IOException {
    logging.setLogLevel("log4j.category.net.sf.ehcache", "DEBUG");
    Fragments.setDefaultFragmentCacheType(CacheType.EHCACHE);
    FileFragment ff = new FileFragment(tf.newFolder("cachedVariableFragmentTest"), "testfrag.cdf");
    File cacheLocation = createCacheDir();
    Configuration cacheManagerConfig = new Configuration()
            .diskStore(new DiskStoreConfiguration().path(cacheLocation.getAbsolutePath()));
    CacheManager manager = CacheManager.newInstance(cacheManagerConfig);
    CacheConfiguration config = new CacheConfiguration(ff.getName() + "-variable-fragment-cache-custom", 100);
    config.persistence(
            new PersistenceConfiguration().strategy(PersistenceConfiguration.Strategy.LOCALTEMPSWAP));
    config.setMaxElementsInMemory(10);
    config.setMaxElementsOnDisk(1000);
    config.setDiskSpoolBufferSizeMB(10);
    Ehcache cache = new Cache(config);
    manager.addCache(cache);
    log.info("Storing cache on disk at {}", cacheManagerConfig.getDiskStoreConfiguration().getPath());
    log.info("Using disk store size of {}", cache.getDiskStoreSize());
    log.info("Overflowing to disk: {}", config.isOverflowToDisk());
    ff.setCache(new VariableFragmentArrayCache(cache));
    for (int j = 0; j < 100; j++) {
        VariableFragment vf1 = new VariableFragment(ff, "a" + j);
        vf1.setArray(new ArrayDouble.D2(10, 39));
        VariableFragment vfIndex = new VariableFragment(ff, "index" + j);
        vfIndex.setArray(new ArrayInt.D1(20));
        VariableFragment vf2 = new VariableFragment(ff, "b" + j, vfIndex);
        List<Array> l = new ArrayList<>();
        Array indexArray = vfIndex.getArray();
        int offset = 0;
        for (int i = 0; i < 20; i++) {
            l.add(new ArrayDouble.D1(10));
            indexArray.setInt(i, offset);
            offset += 10;
        }
        vf2.setIndexedArray(l);
        Assert.assertNotNull(vf1.getArray());
        Assert.assertNotNull(vf2.getIndexedArray());
        Assert.assertEquals(20, vf2.getIndexedArray().size());
        Assert.assertNotNull(vfIndex.getArray());
        log.info("In memory: {}; On disk: {}", cache.getSize(), cache.getDiskStoreSize());
    }
    for (IVariableFragment var : ff) {
        Assert.assertNotNull(var.getArray());
        log.info(var.getName() + ": " + var.getArray());
    }
    logging.setLogLevel("log4j.category.net.sf.ehcache", "INFO");
}
/**
 * @throws IOException
 */
@Test
public void cachedVariableFragment() throws IOException {
    logging.setLogLevel("log4j.category.net.sf.ehcache", "DEBUG");
    Fragments.setDefaultFragmentCacheType(CacheType.EHCACHE);
    FileFragment ff = new FileFragment(tf.newFolder("cachedVariableFragmentTest"), "testfrag.cdf");
    Configuration cacheManagerConfig = new Configuration();
    CacheManager manager = CacheManager.newInstance(cacheManagerConfig);
    CacheConfiguration config = new CacheConfiguration(ff.getName() + "-variable-fragment-cache", 100);
    Ehcache cache = new Cache(config);
    manager.addCache(cache);
    for (int j = 0; j < 100; j++) {
        VariableFragment vf1 = new VariableFragment(ff, "a" + j);
        vf1.setArray(new ArrayDouble.D2(10, 39));
        VariableFragment vfIndex = new VariableFragment(ff, "index" + j);
        vfIndex.setArray(new ArrayInt.D1(20));
        VariableFragment vf2 = new VariableFragment(ff, "b" + j, vfIndex);
        List<Array> l = new ArrayList<>();
        Array indexArray = vfIndex.getArray();
        int offset = 0;
        for (int i = 0; i < 20; i++) {
            l.add(new ArrayDouble.D1(10));
            indexArray.setInt(i, offset);
            offset += 10;
        }
        vf2.setIndexedArray(l);
        Assert.assertNotNull(vf1.getArray());
        Assert.assertNotNull(vf2.getIndexedArray());
        Assert.assertEquals(20, vf2.getIndexedArray().size());
        Assert.assertNotNull(vfIndex.getArray());
        log.info("In memory: {}; On disk: {}", cache.getSize(), cache.getDiskStoreSize());
    }
    for (IVariableFragment var : ff) {
        Assert.assertNotNull(var.getArray());
        log.info(var.getName() + ": " + var.getArray());
    }
    logging.setLogLevel("log4j.category.net.sf.ehcache", "INFO");
}
/**
 * CacheManager
 *
 * @author valdo
 */
@Log4j
@Singleton
public class CacheFactory implements AutoCloseable {

    @Inject
    private Scheduler scheduler;
    @Inject
    private InjectorJobFactory ijf;

    private final CacheManager manager = CacheManager.newInstance();
    private final CacheConfiguration defaultConfig =
            this.manager.getConfiguration().getCacheConfigurations().get("data");

    public CacheFactory() {
        log.info(String.format("Cache manager is %s: %s",
                manager.getStatus(), manager.getActiveConfigurationText()));
    }

    public void add(Query query) {
        String name = query.getQid().getId();
        if (!this.manager.cacheExists(name) && query.isCacheable()) {
            CacheConfiguration config = defaultConfig.clone();
            config.setName(name);
            config.setEternal(query.isEternal());
            config.setTimeToLiveSeconds(query.getCacheTime());
            Cache c = new Cache(config);
            this.manager.addCache(c);
            if (log.isDebugEnabled()) {
                log.debug(String.format("Cache %s created: eternal = %s, cacheTime = %d",
                        c.getName(), query.isEternal(), query.getCacheTime()));
            }
        }
    }

    public Cache get(Query query) {
        String name = query.getQid().getId();
        if (this.manager.cacheExists(name)) {
            return this.manager.getCache(name);
        }
        return null;
    }

    public void remove(Query query) {
        String name = query.getQid().getId();
        if (this.manager.cacheExists(name)) {
            this.manager.removeCache(name);
            if (log.isDebugEnabled()) {
                log.debug(String.format("Cache %s removed", name));
            }
        }
    }

    public void logStats() {
        for (String name : manager.getCacheNames()) {
            StatisticsGateway s = manager.getCache(name).getStatistics();
            log.debug(String.format(
                    "Cache %s: hit/miss = %d/%d (%f), heap = %d (%d bytes), disk = %d (%d bytes)",
                    name, s.cacheHitCount(), s.cacheMissCount(), s.cacheHitRatio(),
                    s.getLocalHeapSize(), s.getLocalHeapSizeInBytes(),
                    s.getLocalDiskSize(), s.getLocalDiskSizeInBytes()));
        }
    }

    @Override
    public void close() throws Exception {
        manager.shutdown();
    }

    private final Map<Integer, CacheJobData> cacheJobDataMap = new ConcurrentHashMap<>();

    public void createCacheJob(Handler<?, ?> handler, long expTime) {
        try {
            InjectorJobFactory.startCacheJob(scheduler, ijf, handler);
            cacheJobDataMap.put(handler.getId(), new CacheJobData(handler, expTime));
        } catch (SchedulerException ex) {
            log.error("Error while scheduling cache job", ex);
        }
    }

    public CacheJobData getCacheJobData(Integer id) {
        return cacheJobDataMap.remove(id);
    }

    @RequiredArgsConstructor
    @Getter
    public static class CacheJobData {

        private final Handler<?, ?> handler;
        private final long expTime;
    }
}
public ClientContextService() {
    ClassLoader classLoader = getClass().getClassLoader();
    cacheManager = CacheManager.newInstance(classLoader.getResource("ehcache.xml").getFile());
    cache = cacheManager.getCache("clientsCache");
    System.out.println("ready: " + cache.getName());
}
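getResource("ehcache.xml").getFile() only resolves when the configuration sits on the plain filesystem; if it is packaged inside a JAR, the stream-based overload of newInstance is the more robust choice. A minimal sketch of that variant, assuming the same classpath resource name and fields:

public ClientContextService() throws IOException {
    // Read ehcache.xml as a classpath stream so the lookup also works from inside a JAR.
    try (InputStream in = getClass().getClassLoader().getResourceAsStream("ehcache.xml")) {
        cacheManager = CacheManager.newInstance(in);
    }
    cache = cacheManager.getCache("clientsCache");
}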
public RetryLimitHashedCredentialsMatcher() {
    CacheManager cacheManager =
            CacheManager.newInstance(CacheManager.class.getClassLoader().getResource("ehcache.xml"));
    passwordRetryCache = cacheManager.getCache("passwordRetryCache");
}
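The constructor above only wires up the cache; the retry limit itself is usually enforced by overriding doCredentialsMatch. The following is a sketch of that common Shiro pattern, not the original implementation; the limit of five attempts and the exception message are assumptions:

@Override
public boolean doCredentialsMatch(AuthenticationToken token, AuthenticationInfo info) {
    String username = (String) token.getPrincipal();
    Element element = passwordRetryCache.get(username);
    if (element == null) {
        element = new Element(username, new AtomicInteger(0));
        passwordRetryCache.put(element);
    }
    AtomicInteger retryCount = (AtomicInteger) element.getObjectValue();
    if (retryCount.incrementAndGet() > 5) {
        // Assumed policy: reject after five consecutive failed attempts.
        throw new ExcessiveAttemptsException("Too many failed login attempts for " + username);
    }
    boolean matches = super.doCredentialsMatch(token, info);
    if (matches) {
        // Successful login clears the counter.
        passwordRetryCache.remove(username);
    }
    return matches;
}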