/**
 * Writes a checkpoint: optionally defragments the data-file cache, then closes
 * and reopens the database files so the script/log is compacted.
 *
 * @param defrag when true, defragment the cache file and update every table's
 *     index roots with the new positions produced by the defragger
 * @throws SQLException if closing, saving properties, or reopening fails
 */
void checkpoint(boolean defrag) throws SQLException {
  if (defrag) {
    ArrayList rootsArray = cCache.defrag();
    for (int i = 0; i < rootsArray.size(); i++) {
      int[] roots = (int[]) rootsArray.get(i);
      if (roots != null) {
        // debug aid: print the relocated index root positions per table
        Trace.printSystemOut(org.hsqldb.lib.StringUtil.getList(roots, " ", ""));
      }
    }
    DataFileDefrag2.updateTableIndexRoots(dDatabase.getTables(), rootsArray);
  }
  // close without compacting; this flushes the cache and rewrites the script
  close(false);
  // mark the files as modified again before resuming normal operation
  pProperties.setProperty("modified", "yes");
  pProperties.save();
  if (cCache != null) {
    cCache.open(false);
  }
  reopenAllTextCaches();
  openScript();
}
/**
 * Removes the object registered under the given class/context pair. When the
 * class's cache becomes empty as a result, the cache entry itself is dropped.
 *
 * @param clazz the primary key class
 * @param context the secondary key whose registration is removed
 */
public void unregister(Class<?> clazz, K context) {
  Cache<K, T> entries = getCache(clazz);
  if (entries == null) {
    return;
  }
  entries.setObject(context, null);
  if (entries.size() == 0) {
    _cache.remove(clazz);
  }
}
/**
 * Gets the secondary keys that are registered with the class in CacheMap.
 *
 * @param clazz the class whose keys are requested
 * @param a the array to receive the keys
 * @return the registered keys, or {@code a} unchanged when no cache exists
 */
public K[] getKeys(Class<?> clazz, K[] a) {
  Cache<K, T> entries = getCache(clazz);
  if (entries == null) {
    return a;
  }
  return entries.keySet().toArray(a);
}
/** Demonstrates the cache by referencing a fixed sequence of keys. */
public static void main(String[] args) {
  Cache demo = new Cache(0, 9);
  // same reference sequence as before, expressed as data
  int[] refs = {1, 2, 3, 1, 4, 5};
  for (int key : refs) {
    demo.refer(key);
  }
  demo.display(60);
}
/**
 * Removes every registration held for the designated class, then drops its
 * cache entry entirely.
 *
 * @param clazz the class whose registrations are cleared
 */
@SuppressWarnings("unchecked")
public void remove(Class<?> clazz) {
  Cache<K, T> entries = getCache(clazz);
  if (entries != null) {
    // snapshot the keys first: clearing entries mutates the key set
    for (Object key : entries.keySet().toArray()) {
      entries.setObject((K) key, null);
    }
  }
  _cache.remove(clazz);
}
/**
 * Collects every distinct registered object across all class caches.
 *
 * @return the registered values in first-seen order, without duplicates
 */
public List<T> getValues() {
  List<T> result = new ArrayList<>();
  for (Cache<K, T> perClass : _cache.values()) {
    for (T value : perClass.values()) {
      // linear duplicate check keeps first-seen ordering
      if (!result.contains(value)) {
        result.add(value);
      }
    }
  }
  return result;
}
/** * Registers an object with the specified clazz and object. * * @param clazz the class which is used as the key. * @param object the object, or the value of the mapping * @param context the secondary key. It is used to register multiple objects to the same primary * key (the clazz parameter in this case). */ public void register(Class<?> clazz, T object, K context) { if (clazz == null) { throw new IllegalArgumentException("Parameter clazz cannot be null"); } // register primitive type automatically if (TypeUtils.isPrimitiveWrapper(clazz)) { Class<?> primitiveType = TypeUtils.convertWrapperToPrimitiveType(clazz); register(primitiveType, object, context); } Cache<K, T> cache = initCache(clazz); cache.setObject(context, object); }
/**
 * Closes the cache (when open) and disconnects from the distributed system.
 * Failures during teardown are logged and swallowed so that shutdown always
 * completes.
 */
public static void closeCache() {
  try {
    if (cache != null && !cache.isClosed()) {
      cache.close();
    }
  } catch (Exception e) {
    // log rather than printStackTrace, consistent with the handling of the
    // disconnect failure below
    getLogWriter().fine("Error in closing the cache: " + e);
  }
  try {
    if (ds != null) {
      ds.disconnect();
    }
  } catch (Exception e) {
    getLogWriter().fine("Error in disconnecting from Distributed System");
  }
}
/**
 * Tests that we are in {@link GemFireHealth#OKAY_HEALTH okay} health if the hit ratio dips below
 * the threshold.
 */
public void testCheckHitRatio() throws CacheException {
  Cache cache = CacheFactory.create(this.system);
  // CachePerfStats stats = ((GemFireCache) cache).getCachePerfStats();
  AttributesFactory factory = new AttributesFactory();
  factory.setScope(Scope.LOCAL);
  // loader ensures get() on an absent key counts as a miss that then loads
  factory.setCacheLoader(
      new CacheLoader() {
        public Object load(LoaderHelper helper) throws CacheLoaderException {
          return "Loaded";
        }

        public void close() {}
      });
  RegionAttributes attrs = factory.create();
  Region region = cache.createRegion(this.getName(), attrs);
  GemFireHealthConfig config = new GemFireHealthConfigImpl(null);
  config.setMinHitRatio(0.5);
  CacheHealthEvaluator eval =
      new CacheHealthEvaluator(config, this.system.getDistributionManager());
  // baseline: no cache activity yet, so no health complaints expected
  List status = new ArrayList();
  eval.evaluate(status);
  assertEquals(0, status.size());
  // repeated gets on one key keep the hit ratio above the 0.5 threshold
  region.get("One");
  region.get("One");
  region.get("One");
  status = new ArrayList();
  eval.evaluate(status);
  assertEquals(0, status.size());
  // 50 distinct keys are all misses, dragging the ratio below 0.5
  for (int i = 0; i < 50; i++) {
    region.get("Miss " + i);
  }
  status = new ArrayList();
  eval.evaluate(status);
  AbstractHealthEvaluator.HealthStatus ill =
      (AbstractHealthEvaluator.HealthStatus) status.get(0);
  assertEquals(GemFireHealth.OKAY_HEALTH, ill.getHealthCode());
  String s = "The hit ratio of this Cache";
  assertTrue(ill.getDiagnosis().indexOf(s) != -1);
}
/**
 * Adds the given files to a playlist on a background thread, updating the info
 * label with per-track progress as each file is processed.
 *
 * @param files the files to push into the playlist
 * @param p the target playlist
 */
void addToPlaylist(List<File> files, Playlist p) {
  if (interfaceDisabled) return;
  new Thread(
          () -> {
            setInterfaceDisabled(true);
            int total = files.size();
            AtomicInteger current = new AtomicInteger(0);
            Cache.pushToPlaylist(
                files,
                p,
                (Track t) -> {
                  // per-track progress callback; all UI mutation is marshalled
                  // onto the JavaFX application thread
                  Platform.runLater(
                      () -> {
                        infoLabel.setText(
                            String.format(
                                res.getString("processed"),
                                current.incrementAndGet(),
                                total,
                                t.getTitle()));
                        if (p == getSelectedPlaylist()) {
                          // move the processed track to the top of the visible list
                          tracksView.getItems().remove(t);
                          tracksView.getItems().add(0, t);
                        }
                      });
                });
            // NOTE(review): assumes Cache.pushToPlaylist blocks until all files
            // are processed; if it is asynchronous, the UI is re-enabled too
            // early — verify against Cache's implementation
            setInterfaceDisabled(false);
          })
      .start();
  if (p == getSelectedPlaylist()) {
    Platform.runLater(
        () -> {
          loadSelectedPlaylist();
        });
  }
}
/**
 * Prompts for a new title for the selected track, renames it in the cache, and
 * refreshes the playlist view. Empty input is ignored.
 */
void renameTrack() {
  if (interfaceDisabled) {
    return;
  }
  Playlist playlist = getSelectedPlaylist();
  if (playlist == null) {
    return;
  }
  Track track = getSelectedTrack();
  if (track == null) {
    return;
  }
  TextInputDialog dialog = new TextInputDialog(track.getTitle());
  dialog.setTitle(res.getString("rename_track"));
  dialog.setHeaderText(res.getString("rename_track"));
  dialog.setContentText(res.getString("enter_new_title"));
  dialog.getDialogPane().getStylesheets().add("/styles/dialogs.css");
  Stage dialogStage = (Stage) dialog.getDialogPane().getScene().getWindow();
  dialogStage.getIcons().addAll(logoImages);
  dialog
      .showAndWait()
      .ifPresent(
          newTitle -> {
            if (StringUtils.isEmpty(newTitle)) {
              return;
            }
            Cache.renameTrack(track, playlist, newTitle);
            Platform.runLater(this::loadSelectedPlaylist);
          });
}
/**
 * Verifies that a transaction whose 2-second timeout elapses (we sleep 4s)
 * fails to commit.
 *
 * @throws Exception if test setup fails unexpectedly
 */
public static void runTest3() throws Exception {
  boolean exceptionOccurred = false;
  try {
    Context ctx = cache.getJNDIContext();
    DataSource ds1 = (DataSource) ctx.lookup("java:/XAPooledDataSource");
    DataSource ds2 = (DataSource) ctx.lookup("java:/SimpleDataSource");
    ds2.getConnection();
    UserTransaction utx = (UserTransaction) ctx.lookup("java:/UserTransaction");
    utx.begin();
    utx.setTransactionTimeout(2);
    ds1.getConnection();
    // sleep past the 2-second transaction timeout
    Thread.sleep(4000);
    try {
      utx.commit();
    } catch (Exception e) {
      // expected: the transaction timed out before commit
      exceptionOccurred = true;
    }
    if (!exceptionOccurred) {
      // fixed message: the original concatenation read "supposedto occur"
      fail("Exception (Transaction-Time-Out) did not occur although it was supposed to occur");
    }
  } catch (Exception e) {
    fail("failed in runTest3 due to " + e);
  }
}
/**
 * Verifies that purging a repo key whose entry has already been evicted from
 * the cache does not raise: purge must check repo existence first.
 *
 * @throws IOException if repo creation fails
 * @throws IllegalAccessException if reflective access to the factory's cache fails
 */
@Test
public void shouldCheckRepoExistenceBeforeTryingPurge() throws IOException, IllegalAccessException {
  factory.createSuiteTimeRepo("foo", LATEST_VERSION);
  // reach into the factory's private cache field via reflection
  Cache<EntryRepo> repos = (Cache<EntryRepo>) deref("cache", factory);
  List<String> keys = repos.keys();
  assertThat(keys.size(), is(1));
  String fooKey = keys.get(0);
  // evict everything so fooKey no longer resolves to a repo
  repos.clear();
  try {
    factory.purge(fooKey);
  } catch (IOException e) {
    e.printStackTrace();
    fail("Should not fail when trying to purge already purged entry");
  }
}
/**
 * Verifies that committing after the pooled connection's timeout has elapsed
 * (we sleep 8s) raises an exception.
 *
 * @throws Exception if test setup fails unexpectedly
 */
public static void runTest1() throws Exception {
  boolean exceptionOccurred = false;
  try {
    Context ctx = cache.getJNDIContext();
    DataSource ds1 = (DataSource) ctx.lookup("java:/XAPooledDataSource");
    DataSource ds2 = (DataSource) ctx.lookup("java:/SimpleDataSource");
    ds2.getConnection();
    ds1 = (DataSource) ctx.lookup("java:/XAPooledDataSource");
    UserTransaction utx = (UserTransaction) ctx.lookup("java:/UserTransaction");
    utx.begin();
    ds1.getConnection();
    // sleep long enough for the connection/transaction to time out
    Thread.sleep(8000);
    try {
      utx.commit();
    } catch (Exception e) {
      // expected: commit fails after the timeout
      exceptionOccurred = true;
    }
    if (!exceptionOccurred) {
      // fixed message: the original concatenation read "supposedoccur"
      fail("Exception did not occur on commit although it was supposed to occur");
    }
  } catch (Exception e) {
    getLogWriter().fine("Exception caught in runTest1 due to : " + e);
    fail("failed in runTest1 due to " + e);
  }
}
/**
 * Returns the parse listener for the given entry, parsing and caching it on
 * first access. A failed parse is cached as {@code null} so it is not retried.
 *
 * @param entry the container entry to parse
 * @return the cached or freshly parsed listener, or null when parsing failed
 */
protected Listener getListener(Container.Entry entry) {
  URI key = entry.getUri();
  // containsKey (not a bare get) so that a cached null short-circuits re-parsing
  if (cache.containsKey(key)) {
    return cache.get(key);
  }
  Listener parsed;
  try (InputStream in = entry.getInputStream()) {
    parsed = new Listener(entry);
    ANTLRJavaParser.parse(new ANTLRInputStream(in), parsed);
  } catch (IOException ignored) {
    // best effort: remember the failure instead of retrying on every call
    parsed = null;
  }
  cache.put(key, parsed);
  return parsed;
}
/** Creates the cache from the distributed system unless one is already open. */
public static void startCache() {
  try {
    boolean needsCache = (cache == null) || cache.isClosed();
    if (needsCache) {
      cache = CacheFactory.create(ds);
    }
  } catch (Exception e) {
    e.printStackTrace();
  }
}
/**
 * Gets the object registered for exactly this class/context pair. Unlike
 * {@link #getRegisteredObject(Class, Object)}, no fallback to other contexts,
 * super classes, or interfaces is attempted.
 *
 * @param clazz the class used as the primary key; null yields null
 * @param context the secondary key; null falls back to the default context
 * @return the registered object, or null when no exact match exists
 */
public T getMatchRegisteredObject(Class<?> clazz, K context) {
  if (clazz == null) {
    return null;
  }
  K effectiveContext = (context == null) ? _defaultContext : context;
  Cache<K, T> entries = getCache(clazz);
  if (entries == null) {
    return null;
  }
  return entries.getObject(effectiveContext);
}
/**
 * Resolves this compiled region path to a queryable region, substituting the
 * current bucket region when the query executes inside a partitioned region.
 *
 * @param context the query execution context
 * @return a {@link QRegion} wrapping the resolved region
 * @throws RegionNotFoundException if the path does not name an existing region
 */
public Object evaluate(ExecutionContext context) throws RegionNotFoundException {
  Region rgn;
  Cache cache = context.getCache();
  // do PR bucketRegion substitution here for expressions that evaluate to a Region.
  PartitionedRegion pr = context.getPartitionedRegion();
  if (pr != null && pr.getFullPath().equals(this.regionPath)) {
    // the query targets the PR we are executing in: use its bucket directly
    rgn = context.getBucketRegion();
  } else if (pr != null) {
    // Asif : This is a very tricky solution to allow equijoin queries on PartitionedRegion
    // locally
    // We have possibly got a situation of equijoin. it may be across PRs. so use the context's
    // bucket region
    // to get ID and then retrieve the this region's bucket region
    BucketRegion br = context.getBucketRegion();
    int bucketID = br.getId();
    // Is current region a partitioned region
    rgn = cache.getRegion(this.regionPath);
    if (rgn.getAttributes().getDataPolicy().withPartitioning()) {
      // convert it into bucket region.
      PartitionedRegion prLocal = (PartitionedRegion) rgn;
      rgn = prLocal.getDataStore().getLocalBucketById(bucketID);
    }
  } else {
    rgn = cache.getRegion(this.regionPath);
  }
  if (rgn == null) {
    // if we couldn't find the region because the cache is closed, throw
    // a CacheClosedException
    if (cache.isClosed()) {
      throw new CacheClosedException();
    }
    throw new RegionNotFoundException(
        LocalizedStrings.CompiledRegion_REGION_NOT_FOUND_0.toLocalizedString(this.regionPath));
  }
  // CQ query contexts get a QRegion flagged for continuous-query semantics
  if (context.isCqQueryContext()) {
    return new QRegion(rgn, true, context);
  } else {
    return new QRegion(rgn, false, context);
  }
}
/**
 * Populates the cache with {@code 'facts'}, which in our case are
 * {@link FactPurchase} objects tied to random stores and products.
 *
 * @param factCache Cache to populate.
 * @throws IgniteException If failed.
 */
private static void populateFacts(Cache<Integer, FactPurchase> factCache) throws IgniteException {
  for (int n = 0; n < 100; n++) {
    int id = idGen++;
    // pick a random dimension entry for each generated fact
    DimStore store = rand(dataStore.values());
    DimProduct product = rand(dataProduct.values());
    factCache.put(id, new FactPurchase(id, product.getId(), store.getId(), (n + 1)));
  }
}
/**
 * Adds a single track to the playlist and refreshes the view when that
 * playlist is currently selected.
 *
 * @param t the track to add
 * @param p the target playlist
 */
void addToPlaylist(Track t, Playlist p) {
  if (interfaceDisabled) {
    return;
  }
  Cache.pushToPlaylist(t, p);
  if (p == getSelectedPlaylist()) {
    Platform.runLater(this::loadSelectedPlaylist);
  }
}
/**
 * Builds the summarized social object for a target, stores it in the cache,
 * and returns it.
 *
 * @param targetId the id whose activities are summarized
 * @param tenant the tenant scope
 * @param order the sort order for the summary
 * @return the summarized activities as a JSON object
 */
public JsonObject getSocialObject(String targetId, String tenant, SortOrder order) {
  ActivitySummarizer summarizer = new ActivitySummarizer(targetId, order);
  for (Activity activity : listActivitiesChronologically(targetId, tenant)) {
    summarizer.add(activity);
  }
  JsonObject summary = summarizer.summarize();
  cache.put(targetId, tenant, summary);
  return summary;
}
/**
 * Lazily creates and opens the data-file cache on first use.
 *
 * @return the cache instance
 * @throws SQLException if opening the cache file fails
 */
Cache getCache() throws SQLException {
  if (cCache == null) {
    // assign before open, matching the original exception-path behavior
    cCache = new Cache(sFileCache, this.dDatabase);
    cCache.open(bReadOnly);
  }
  return cCache;
}
/**
 * Closes the database files in a crash-safe order: write the new script, flush
 * the cache, back up the data file, then atomically swap in the new files via
 * the "modified" property state machine. No-op when read-only.
 *
 * @param compact when true, also delete the data/backup files so they are
 *     rebuilt from the script on next open
 * @throws SQLException if writing, flushing, or renaming fails
 */
void close(boolean compact) throws SQLException {
  if (Trace.TRACE) {
    Trace.trace();
  }
  if (bReadOnly) {
    return;
  }
  // no more scripting
  closeScript();
  // create '.script.new' (for this the cache may be still required)
  writeScript(compact);
  // flush the cache (important: after writing the script)
  if (cCache != null) {
    cCache.flush();
  }
  closeAllTextCaches(compact);
  // create '.backup.new' using the '.data'
  backup();
  // we have the new files
  pProperties.setProperty("modified", "yes-new-files");
  pProperties.save();
  // old files can be removed and new files renamed
  renameNewToCurrent(sFileScript);
  renameNewToCurrent(sFileBackup);
  // now its done completely
  pProperties.setProperty("modified", "no");
  pProperties.setProperty("version", jdbcDriver.VERSION);
  pProperties.setProperty("hsqldb.compatible_version", "1.7.0");
  pProperties.save();
  pProperties.close();
  if (compact) {
    // stop the runner thread of this process (just for security)
    stop();
    // delete the .data so then a new file is created
    (new File(sFileCache)).delete();
    (new File(sFileBackup)).delete();
    // tony_lai@users 20020820
    // The database re-open and close has been moved to
    // Database#close(int closemode) for saving memory usage.
  }
}
/**
 * Returns the kernel row for sample {@code i}, computing and caching any
 * columns beyond the prefix the cache already holds.
 *
 * @param i the sample (row) index
 * @param len how many columns of the row are needed
 * @return the row of kernel values (may be shared cache storage)
 */
float[] get_Q(int i, int len) {
  float[][] data = new float[1][];
  int start = cache.get_data(i, data, len);
  // the loop is a no-op when the cache already held the full prefix
  for (int j = start; j < len; j++) {
    data[0][j] = (float) kernel_function(i, j);
  }
  return data[0];
}
/**
 * Removes dead (no longer resolvable) items from an offline playlist, reports
 * the count in the info label and log, then refreshes the view.
 *
 * @param p the playlist to clean up
 */
void deleteDeadFromOfflinePlaylist(Playlist p) {
  if (interfaceDisabled) {
    return;
  }
  infoLabel.setText(res.getString("deleting_dead_items"));
  log.info("Deleting dead items from " + p.getTitle());
  Integer deletedCount = Cache.deleteDead(p);
  infoLabel.setText(String.format(res.getString("deleted_dead_items"), deletedCount));
  log.info("Deleted dead items from " + p.getTitle() + ": " + deletedCount);
  Platform.runLater(this::loadSelectedPlaylist);
}
/**
 * Returns a trait set equal to this one but with the trait at {@code index}
 * replaced. This trait set itself is never modified.
 *
 * @param index 0-based index into the ordered RelTraitSet
 * @param trait the new RelTrait (must share the RelTraitDef at that index)
 * @return the canonical trait set with the replacement applied
 */
public RelTraitSet replace(int index, RelTrait trait) {
  assert traits[index].getTraitDef() == trait.getTraitDef()
      : "RelTrait has different RelTraitDef than replacement";
  RelTrait canonical = canonize(trait);
  if (traits[index] == canonical) {
    // already holds the canonical instance; nothing to do
    return this;
  }
  RelTrait[] replaced = traits.clone();
  replaced[index] = canonical;
  return cache.getOrAdd(new RelTraitSet(cache, replaced));
}
/**
 * Reloads the playlist list from the cache and re-resolves the remembered
 * playlist id (from settings) against the fresh items.
 */
void updatePlaylists() {
  if (interfaceDisabled) {
    return;
  }
  ObservableList<Playlist> items = playlistsView.getItems();
  items.clear();
  items.addAll(Cache.playlists());
  if (Settings.rememberedPlaylistId == null) {
    return;
  }
  for (Playlist candidate : items) {
    if (candidate.getId().equals(Settings.rememberedPlaylistId)) {
      rememberedPlaylist = candidate;
      break;
    }
  }
}
/** * Populate cache with {@code 'dimensions'} which in our case are {@link DimStore} and {@link * DimProduct} instances. * * @param dimCache Cache to populate. * @throws IgniteException If failed. */ private static void populateDimensions(Cache<Integer, Object> dimCache) throws IgniteException { DimStore store1 = new DimStore(idGen++, "Store1", "12345", "321 Chilly Dr, NY"); DimStore store2 = new DimStore(idGen++, "Store2", "54321", "123 Windy Dr, San Francisco"); // Populate stores. dimCache.put(store1.getId(), store1); dimCache.put(store2.getId(), store2); dataStore.put(store1.getId(), store1); dataStore.put(store2.getId(), store2); // Populate products for (int i = 0; i < 20; i++) { int id = idGen++; DimProduct product = new DimProduct(id, "Product" + i, i + 1, (i + 1) * 10); dimCache.put(id, product); dataProduct.put(id, product); } }
/**
 * Returns this trait set with the given trait added, or overriding an existing
 * trait of the same RelTraitDef. Does not modify this trait set.
 *
 * @param trait Trait
 * @return Trait set with given trait
 */
public RelTraitSet plus(RelTrait trait) {
  if (contains(trait)) {
    return this;
  }
  int existing = findIndex(trait.getTraitDef());
  if (existing >= 0) {
    // a trait of this def is already present: override it in place
    return replace(existing, trait);
  }
  // append the canonical form of the new trait
  RelTrait[] grown = Arrays.copyOf(traits, traits.length + 1);
  grown[grown.length - 1] = canonize(trait);
  return cache.getOrAdd(new RelTraitSet(cache, grown));
}
/**
 * Reacts to typerel node events: a new typerel node is added to the typerel
 * cache incrementally; any other change triggers a full cache re-read. In both
 * cases all query-result caches are cleared because results may change.
 *
 * @param event the node event to process
 * @see org.mmbase.module.core.MMObjectBuilder#notify(org.mmbase.core.event.NodeEvent)
 */
@Override
public void notify(NodeEvent event) {
  if (log.isDebugEnabled()) {
    log.debug(
        "Changed "
            + event.getMachine()
            + " "
            + event.getNodeNumber()
            + " "
            + event.getBuilderName()
            + " "
            + NodeEvent.newTypeToOldType(event.getType()));
  }
  // only events about this builder's own table concern the typerel cache
  if (tableName.equals(event.getBuilderName())) {
    if (event.getType() == Event.TYPE_NEW) {
      MMObjectNode typeRelNode = getNode(event.getNodeNumber());
      if (typeRelNode != null) {
        // incremental update: fold the new typerel into the cache
        Set<MMObjectNode> newTypeRels = addCacheEntry(typeRelNode, true);
        log.service("Added to typerelcache: " + newTypeRels);
      } else {
        log.warn("Could not found typerel node with number " + event.getNodeNumber());
      }
    } else {
      // something else changed in a typerel node? reread the complete typeRelNodes Set
      log.service(
          "Received '" + event + "' which is about typrels. Now re-reading the entire cache");
      readCache();
    }
    // also, clear all query-caches, because result may change by this. See MMB-348
    for (Cache qc : CacheManager.getMap().values()) {
      if (qc instanceof QueryResultCache) {
        qc.clear();
      }
    }
  }
  super.notify(event);
}