private final PredicatePrecision computeNewPrecision() { // get previous precision UnmodifiableReachedSet unmodifiableReached = reached.asReachedSet(); logger.log(Level.FINEST, "Removing everything below", refinementRoot, "from ARG."); // now create new precision precisionUpdate.start(); PredicatePrecision basePrecision = findAllPredicatesFromSubgraph(refinementRoot, unmodifiableReached); logger.log(Level.ALL, "Old predicate map is", basePrecision); logger.log(Level.ALL, "New predicates are", newPredicates); PredicatePrecision newPrecision = basePrecision.addLocalPredicates(newPredicates.entries()); logger.log(Level.ALL, "Predicate map now is", newPrecision); assert basePrecision.calculateDifferenceTo(newPrecision) == 0 : "We forgot predicates during refinement!"; precisionUpdate.stop(); return newPrecision; }
/** * Tests whether data in valuesIterator matches with sorted input data set. * * <p>Returns a list of value counts for each key. * * @param valuesIterator * @return List * @throws IOException */ private List<Integer> verifyIteratorData(ValuesIterator valuesIterator) throws IOException { boolean result = true; ArrayList<Integer> sequence = new ArrayList<Integer>(); // sort original data based on comparator ListMultimap<Writable, Writable> sortedMap = new ImmutableListMultimap.Builder<Writable, Writable>() .orderKeysBy(this.correctComparator) .putAll(originalData) .build(); Set<Map.Entry<Writable, Writable>> oriKeySet = Sets.newSet(); oriKeySet.addAll(sortedMap.entries()); // Iterate through sorted data and valuesIterator for verification for (Map.Entry<Writable, Writable> entry : oriKeySet) { assertTrue(valuesIterator.moveToNext()); Writable oriKey = entry.getKey(); // Verify if the key and the original key are same if (!oriKey.equals((Writable) valuesIterator.getKey())) { result = false; break; } int valueCount = 0; // Verify values Iterator<Writable> vItr = valuesIterator.getValues().iterator(); for (Writable val : sortedMap.get(oriKey)) { assertTrue(vItr.hasNext()); // Verify if the values are same if (!val.equals((Writable) vItr.next())) { result = false; break; } valueCount++; } sequence.add(valueCount); assertTrue("At least 1 value per key", valueCount > 0); } if (expectedTestResult) { assertTrue(result); assertFalse(valuesIterator.moveToNext()); getNextFromFinishedIterator(valuesIterator); } else { while (valuesIterator.moveToNext()) { // iterate through all keys } getNextFromFinishedIterator(valuesIterator); assertFalse(result); } return sequence; }
/** * This method exports the current representation in dot format to the given file. * * @param file file the file to write to */ public void exportToDot(PathTemplate file, int refinementCounter) { StringBuilder result = new StringBuilder().append("digraph tree {" + "\n"); for (Map.Entry<ARGState, ARGState> current : successorRelation.entries()) { if (interpolants.containsKey(current.getKey())) { StringBuilder sb = new StringBuilder(); sb.append("itp is " + interpolants.get(current.getKey())); result.append( current.getKey().getStateId() + " [label=\"" + (current.getKey().getStateId() + " / " + AbstractStates.extractLocation(current.getKey())) + " has itp " + (sb.toString()) + "\"]" + "\n"); result.append( current.getKey().getStateId() + " -> " + current.getValue().getStateId() + "\n"); // + " [label=\"" + // current.getKey().getEdgeToChild(current.getValue()).getRawStatement().replace("\n", "") + "\"]\n"); } else { result.append( current.getKey().getStateId() + " [label=\"" + current.getKey().getStateId() + " has itp NA\"]" + "\n"); result.append( current.getKey().getStateId() + " -> " + current.getValue().getStateId() + "\n"); // + " [label=\"" + // current.getKey().getEdgeToChild(current.getValue()).getRawStatement().replace("\n", "") + "\"]\n"); } if (current.getValue().isTarget()) { result.append( current.getValue().getStateId() + " [style=filled, fillcolor=\"red\"]" + "\n"); } assert (!current.getKey().isTarget()); } result.append("}"); try { MoreFiles.writeFile( file.getPath(refinementCounter, interpolationCounter), Charset.defaultCharset(), result); } catch (IOException e) { logger.logUserException(Level.WARNING, e, "Could not write interpolation tree to file"); } }
/**
 * Rebuilds the permissions cache entry for a single table, separating group principals
 * from user principals so that group lookups are cheap.
 *
 * @param table the table whose cache entry is replaced
 * @param tablePerms all permissions granted on the table, keyed by principal name
 */
private void updateTableCache(byte[] table, ListMultimap<String, TablePermission> tablePerms) {
  PermissionCache<TablePermission> rebuilt = new PermissionCache<TablePermission>();
  for (Map.Entry<String, TablePermission> perm : tablePerms.entries()) {
    String principal = perm.getKey();
    if (AccessControlLists.isGroupPrincipal(principal)) {
      rebuilt.putGroup(AccessControlLists.getGroupName(principal), perm.getValue());
    } else {
      rebuilt.putUser(principal, perm.getValue());
    }
  }
  tableCache.put(table, rebuilt);
}
/** * Updates the internal global permissions cache * * @param userPerms */ private void updateGlobalCache(ListMultimap<String, TablePermission> userPerms) { PermissionCache<Permission> newCache = null; try { newCache = initGlobal(conf); for (Map.Entry<String, TablePermission> entry : userPerms.entries()) { if (AccessControlLists.isGroupPrincipal(entry.getKey())) { newCache.putGroup( AccessControlLists.getGroupName(entry.getKey()), new Permission(entry.getValue().getActions())); } else { newCache.putUser(entry.getKey(), new Permission(entry.getValue().getActions())); } } globalCache = newCache; } catch (IOException e) { // Never happens LOG.error("Error occured while updating the global cache", e); } }
/**
 * Builds the synonyms part of the query from the theme and place keywords. Each non-blank
 * keyword is wrapped in parentheses, attached to its synonym fields, and the resulting
 * field:value clauses are joined with " OR ".
 *
 * @return the assembled clause string; empty when no keyword is set
 */
private String generateSynonymsQuery() {
  ListMultimap<String, String> clauses = ArrayListMultimap.create();

  if (StringUtils.isNotBlank(themeKeyword)) {
    String wrapped = "(" + themeKeyword + ")";
    clauses.put(SolrRecord.LAYER_DISPLAY_NAME_SYNONYMS, wrapped);
    clauses.put(SolrRecord.THEME_KEYWORDS_SYNONYMS_LCSH, wrapped);
  }

  if (StringUtils.isNotBlank(placeKeyword)) {
    String wrapped = "(" + placeKeyword + ")";
    clauses.put(SolrRecord.PLACE_KEYWORDS_SYNONYMS, wrapped);
    clauses.put(SolrRecord.LAYER_DISPLAY_NAME_SYNONYMS, wrapped);
  }

  StringBuilder synonymsQuery = new StringBuilder();
  String separator = "";
  for (Entry<String, String> clause : clauses.entries()) {
    synonymsQuery.append(separator).append(clause.getKey()).append(":").append(clause.getValue());
    separator = " OR ";
  }
  return synonymsQuery.toString();
}
@Test public void testAclTableEntries() throws Exception { String userTestNamespace = "userTestNsp"; Table acl = UTIL.getConnection().getTable(AccessControlLists.ACL_TABLE_NAME); try { ListMultimap<String, TablePermission> perms = AccessControlLists.getNamespacePermissions(conf, TEST_NAMESPACE); perms = AccessControlLists.getNamespacePermissions(conf, TEST_NAMESPACE); for (Map.Entry<String, TablePermission> entry : perms.entries()) { LOG.debug(entry); } assertEquals(6, perms.size()); // Grant and check state in ACL table grantOnNamespace(UTIL, userTestNamespace, TEST_NAMESPACE, Permission.Action.WRITE); Result result = acl.get(new Get(Bytes.toBytes(userTestNamespace))); assertTrue(result != null); perms = AccessControlLists.getNamespacePermissions(conf, TEST_NAMESPACE); assertEquals(7, perms.size()); List<TablePermission> namespacePerms = perms.get(userTestNamespace); assertTrue(perms.containsKey(userTestNamespace)); assertEquals(1, namespacePerms.size()); assertEquals(TEST_NAMESPACE, namespacePerms.get(0).getNamespace()); assertEquals(null, namespacePerms.get(0).getFamily()); assertEquals(null, namespacePerms.get(0).getQualifier()); assertEquals(1, namespacePerms.get(0).getActions().length); assertEquals(Permission.Action.WRITE, namespacePerms.get(0).getActions()[0]); // Revoke and check state in ACL table revokeFromNamespace(UTIL, userTestNamespace, TEST_NAMESPACE, Permission.Action.WRITE); perms = AccessControlLists.getNamespacePermissions(conf, TEST_NAMESPACE); assertEquals(6, perms.size()); } finally { acl.close(); } }