/**
 * Writes one widget event to the recorder output.
 *
 * <p>Behavior visible in this block: bails out when inactive or the writer is
 * missing; announces the AUT mode whenever it changes; lets any registered
 * filter veto the event; collapses a repeat of the previous event (same
 * widget, same type, equivalent snapshot) into a short "message..." line; and
 * optionally appends a stack trace.
 *
 * @param message leading text for the log line
 * @param widget the widget the event was delivered to
 * @param type the SWT-style event type code
 * @param event the raw event
 * @param send whether the event is being sent (vs. received/replayed)
 */
private synchronized void writeEvent(
    String message, Widget widget, int type, Event event, boolean send) {
  if (!active || writer == null) return;
  AutState state = detectAutState();
  // Announce the AUT mode on the first event and on every mode change.
  if (lastState == null || lastState != state) {
    writeAnnouncement("%s is the current AUT mode", state);
    lastState = state;
  }
  // Any filter returning true suppresses the event entirely.
  for (IFilter f : filters) if (f.filter(widget, type, event, send)) return;
  Map<String, Object> snapshot = makeSnapshot(event);
  // Duplicate suppression: identical widget/type and an equal snapshot mean
  // the previous line already describes this event.
  if (widget == lastWidget
      && (lastType != null && type == lastType)
      && areSnapshotsEqual(snapshot, lastSnapshot)) {
    write("%s...", message);
    return;
  }
  lastWidget = widget;
  lastType = type;
  lastSnapshot = snapshot;
  // NOTE(review): a fresh SimpleDateFormat per call is wasteful, but it is
  // thread-safe here because the method is synchronized; java.time's
  // DateTimeFormatter would be the modern replacement — confirm before changing.
  write(
      "%s %s%s %s - %s\n %s\n",
      message,
      formatTags(widget, type, event, send),
      formatEventName(type, event),
      formatObject(widget),
      new SimpleDateFormat("yyyy/MM/dd HH:mm:ss:SSS").format(new Date()),
      formatEvent(widget, snapshot));
  // Stack traces are recorded for every event when the flag is set, and
  // always for non-send events.
  if (logStackTraceOnSend || !send) writeStackTrace();
}
/**
 * Evaluates the argument against every registered filter.
 *
 * @param arg0 the object under test
 * @return true when no filter list is set, or when every filter accepts the
 *     argument; false as soon as one filter rejects it
 */
@Override
public boolean evaluate(Object arg0) {
  if (_filters == null) {
    return true;
  }
  boolean accepted = true;
  for (IFilter<BaseClass, Element> candidate : _filters) {
    if (!candidate.evaluate(arg0)) {
      accepted = false;
      break;
    }
  }
  return accepted;
}
/**
 * Registers a filter in the chain, keyed by its id.
 *
 * @param filter the filter instance to be registered
 * @return the registered instance
 */
public IFilter addFilter(IFilter filter) {
  chain.put(filter.getID(), filter);
  String details = filter.getType() + " - " + filter.getDescription();
  logger.trace(Messages.getString("FilterChain.FILTER_ADDED") + details);
  return filter;
}
/**
 * Accepts a value only when none of the inner filters rejects it.
 *
 * @param value the value under test
 * @return false as soon as one inner filter rejects the value, true otherwise
 */
@Override
public boolean accept(T value) {
  for (IFilter<T> inner : innerFilters) {
    boolean ok = inner.accept(value);
    if (!ok) {
      return false;
    }
  }
  return true;
}
/**
 * This method does the actual filtering. It implements the main logic of the filter.
 *
 * @param id the id to be checked
 * @return <em>null</em> if the id should be filtered and the id if it satisfies the rules.
 */
public String apply(String id) {
  // Iterate the chain's filters directly instead of walking the key set and
  // performing a map lookup per key (the original's explicit-Iterator form).
  for (IFilter filter : chain.values()) {
    if (filter.apply(id) == null) {
      return null;
    }
  }
  return id;
}
// TODO move gcBefore into a field
/**
 * Merges several column iterators into {@code returnCF}, collapsing columns
 * that share a name via a reducing merge iterator.
 *
 * @param returnCF the column family receiving the collated columns
 * @param toCollate the per-source column iterators, each sorted by the
 *     top-level filter's comparator
 * @param gcBefore GC-grace cutoff passed through to tombstone filtering
 */
public void collateColumns(
    final ColumnFamily returnCF,
    List<? extends CloseableIterator<IColumn>> toCollate,
    final int gcBefore) {
  // When a super filter is present it drives the top-level (super column) merge.
  IFilter topLevelFilter = (superFilter == null ? filter : superFilter);
  Comparator<IColumn> fcomp = topLevelFilter.getColumnComparator(returnCF.getComparator());
  // define a 'reduced' iterator that merges columns w/ the same name, which
  // greatly simplifies computing liveColumns in the presence of tombstones.
  MergeIterator.Reducer<IColumn, IColumn> reducer =
      new MergeIterator.Reducer<IColumn, IColumn>() {
        // Scratch CF used to accumulate the same-named columns of one merge step.
        ColumnFamily curCF = returnCF.cloneMeShallow();

        public void reduce(IColumn current) {
          if (curCF.isSuper() && curCF.isEmpty()) {
            // If it is the first super column we add, we must clone it since other super column
            // may modify
            // it otherwise and it could be aliased in a memtable somewhere. We'll also don't have
            // to care about what
            // consumers make of the result (for instance CFS.getColumnFamily() call
            // removeDeleted() on the
            // result which removes column; which shouldn't be done on the original super column).
            assert current instanceof SuperColumn;
            curCF.addColumn(((SuperColumn) current).cloneMe());
          } else {
            curCF.addColumn(current);
          }
        }

        protected IColumn getReduced() {
          IColumn c = curCF.iterator().next();
          if (superFilter != null) {
            // filterSuperColumn only looks at immediate parent (the supercolumn) when determining
            // if a subcolumn
            // is still live, i.e., not shadowed by the parent's tombstone. so, bump it up
            // temporarily to the tombstone
            // time of the cf, if that is greater.
            long deletedAt = c.getMarkedForDeleteAt();
            if (returnCF.getMarkedForDeleteAt() > deletedAt)
              ((SuperColumn) c).delete(c.getLocalDeletionTime(), returnCF.getMarkedForDeleteAt());
            c = filter.filterSuperColumn((SuperColumn) c, gcBefore);
            ((SuperColumn) c)
                .delete(
                    c.getLocalDeletionTime(),
                    deletedAt); // reset sc tombstone time to what it should be
          }
          // Hand the merged column out and reset the scratch CF for the next name.
          curCF.clear();
          return c;
        }
      };
  Iterator<IColumn> reduced = MergeIterator.get(toCollate, fcomp, reducer);
  topLevelFilter.collectReducedColumns(returnCF, reduced, gcBefore);
}
/**
 * Narrows {@code docs} to the intersection of all filters' matches: starts
 * from the full set and ANDs in each filter's result, stopping early once the
 * vector is empty.
 *
 * @param reader source the filters evaluate against
 * @param docs in/out bit vector of matching documents
 */
@Override
public void check(ChunkReader reader, BitVector docs) {
  BitVector scratch = new BitVector(docs.size());
  docs.setAll();
  for (IFilter filter : m_filters) {
    scratch.clearAll();
    filter.check(reader, scratch);
    docs.and(scratch);
    // Nothing left to narrow — no later filter can add documents back.
    if (docs.isAllBitsCleared()) {
      break;
    }
  }
}
@Override public int check(ChunkInfo info) { // any filter reports no docs => no docs // all filters report all docs => all docs boolean hasZero = false; for (IFilter filter : m_filters) { int c = filter.check(info); if (c == -1) return -1; if (c == 0) hasZero = true; } return hasZero ? 0 : 1; }
/**
 * This method passes through all the java packages and calls the filter callback passed on each
 * package found.
 *
 * <p>If true is returned on the callback, the children of each package (classes) will also be
 * visited, otherwise, they'll be skipped.
 */
private void filterJavaPackages(IFilter filter) {
  IClasspathEntry[] rawClasspath;
  try {
    rawClasspath = this.javaProject.getRawClasspath();
    // Scratch buffer reused to build "package.Class" names without reallocating.
    FastStringBuffer buffer = new FastStringBuffer();
    for (IClasspathEntry entry : rawClasspath) {
      int entryKind = entry.getEntryKind();
      IClasspathEntry resolvedClasspathEntry = JavaCore.getResolvedClasspathEntry(entry);
      if (entryKind != IClasspathEntry.CPE_CONTAINER) {
        // ignore if it's in the system classpath...
        IPackageFragmentRoot[] roots =
            javaProject.findPackageFragmentRoots(resolvedClasspathEntry); // get the package roots
        for (IPackageFragmentRoot root : roots) {
          IJavaElement[] children = root.getChildren(); // get the actual packages
          for (IJavaElement child : children) {
            IPackageFragment childPackage = (IPackageFragment) child;
            String elementName = childPackage.getElementName();
            // and if the java package is 'accepted'
            if (filter.accept(elementName, root, childPackage)) {
              buffer.clear();
              buffer.append(elementName);
              int packageNameLen = buffer.length();
              if (packageNameLen > 0) {
                buffer.append('.');
                packageNameLen++;
              }
              // traverse its classes
              for (IJavaElement class_ : childPackage.getChildren()) {
                buffer.append(FullRepIterable.getFirstPart(class_.getElementName()));
                // The return value of the class-level accept is intentionally ignored.
                filter.accept(buffer.toString(), root, class_);
                buffer.setCount(
                    packageNameLen); // leave only the package part for the next append
              }
            }
          }
        }
      }
    }
  } catch (Exception e) {
    // NOTE(review): broad catch-and-wrap erases the checked exception types;
    // presumably callers treat any classpath failure as fatal — confirm.
    throw new RuntimeException(e);
  }
}
/**
 * Applies {@code function} to every element of {@code list} and prints a
 * message for each element the filter maps to 0 (the message reads
 * "es par" — Spanish for "is even").
 *
 * @param list the values to examine
 * @param function the filter applied to each value
 */
public static void filter(int[] list, IFilter function) {
  // The original allocated a result[] array that was never read outside the
  // switch, and used a single-case switch; both are simplified away here
  // without changing the observable output.
  for (int value : list) {
    if (function.filter(value) == 0) {
      System.out.println(value + " es par.");
    }
  }
}
@Override public List<IDBRow> filter(List<IDBRow> rows) { logger.debug("Filtering.... "); // TODO this might be a subtle bug... not sure though if (ignore) { return rows; } return root.filter(rows); }
/**
 * Measures the false-positive behavior of {@code f}: inserts every element of
 * {@code keys}, then probes with each element of {@code otherkeys} and counts
 * how many are (falsely) reported present.
 *
 * @param f the filter under test
 * @param keys keys added to the filter
 * @param otherkeys probe keys, same size as {@code keys}
 * @return the false-positive count normalized by the expected count for the
 *     configured bucket/hash parameters; asserted to be below 1.03
 */
public static double testFalsePositives(
    IFilter f, ResetableIterator<ByteBuffer> keys, ResetableIterator<ByteBuffer> otherkeys) {
  assert keys.size() == otherkeys.size();
  while (keys.hasNext()) {
    f.add(wrap(keys.next()));
  }
  int fp = 0;
  while (otherkeys.hasNext()) {
    if (f.isPresent(wrap(otherkeys.next()))) {
      fp++;
    }
  }
  // NOTE(review): keys.size() is read after the iterator is exhausted —
  // assumes size() is independent of cursor position; confirm against
  // ResetableIterator's contract. `spec` is an enclosing-class field.
  double fp_ratio = fp / (keys.size() * BloomCalculations.probs[spec.bucketsPerElement][spec.K]);
  assert fp_ratio < 1.03 : fp_ratio;
  return fp_ratio;
}
/*
 * Deserialize a particular column since the name is in the form of
 * superColumn:column.
 */
public IColumn deserialize(DataInputStream dis, String name, IFilter filter) throws IOException {
  if (dis.available() == 0) return null;
  // "name" is either just a super column name, or "superColumn:column".
  String[] names = RowMutation.getColumnAndColumnFamily(name);
  if (names.length == 1) {
    // Only a super column name was given: return the whole matching super
    // column (its sub-columns gated by the filter), or skip it on a mismatch.
    IColumn superColumn = defreezeSuperColumn(dis);
    if (name.equals(superColumn.name())) {
      // A deleted super column's payload is left unread; only the header is returned.
      if (!superColumn.isMarkedForDelete()) {
        /* read the number of columns stored */
        int size = dis.readInt();
        /* read the size of all columns */
        dis.readInt();
        IColumn column = null;
        for (int i = 0; i < size; ++i) {
          column = Column.serializer().deserialize(dis, filter);
          if (column != null) {
            superColumn.addColumn(column.name(), column);
            column = null;
            // The filter signals when enough columns have been collected.
            if (filter.isDone()) {
              break;
            }
          }
        }
      }
      return superColumn;
    } else {
      /* read the number of columns stored */
      dis.readInt();
      /* read the size of all columns to skip */
      int size = dis.readInt();
      // NOTE(review): skip() may skip fewer bytes than requested; presumably
      // the stream is fully buffered here — confirm.
      dis.skip(size);
      return null;
    }
  }
  // Both names were given: find the single matching sub-column inside the
  // (live) super column; the loop stops after the first hit.
  SuperColumn superColumn = defreezeSuperColumn(dis);
  if (!superColumn.isMarkedForDelete()) {
    int size = dis.readInt();
    /* skip the size of the columns */
    dis.readInt();
    if (size > 0) {
      for (int i = 0; i < size; ++i) {
        IColumn subColumn = Column.serializer().deserialize(dis, names[1], filter);
        if (subColumn != null) {
          superColumn.addColumn(subColumn.name(), subColumn);
          break;
        }
      }
    }
  }
  return superColumn;
}
/**
 * Deserializes a super column, letting {@code filter} decide whether it is
 * wanted; when rejected, the serialized sub-columns are skipped in the stream.
 *
 * @param dis the input stream positioned at a serialized super column
 * @param filter decides whether the super column is kept
 * @return the (possibly filled) super column, or null when the stream is
 *     empty or the filter rejects it
 * @throws IOException on stream errors
 */
public IColumn deserialize(DataInputStream dis, IFilter filter) throws IOException {
  if (dis.available() == 0) {
    return null;
  }
  IColumn candidate = filter.filter(defreezeSuperColumn(dis), dis);
  if (candidate == null) {
    // Rejected: consume the column count, then skip past the column payload.
    dis.readInt(); /* read the number of columns stored */
    int bytesToSkip = dis.readInt(); /* read the size of all columns to skip */
    dis.skip(bytesToSkip);
    return null;
  }
  // Deleted super columns keep only their header; live ones are filled in.
  if (!candidate.isMarkedForDelete()) {
    fillSuperColumn(candidate, dis);
  }
  return candidate;
}
/**
 * Builds the selection map for the given resources: files accepted by the
 * filter are grouped under their parent container, containers are expanded
 * recursively. The result is pushed to the resource group UI.
 *
 * @param input the resources (files and containers) to select from
 * @param filter decides which files are included
 */
protected void setSelection(IResource[] input, IFilter filter) {
  Map selectionMap = new Hashtable();
  for (int i = 0; i < input.length; i++) {
    IResource resource = input[i];
    if ((resource.getType() & IResource.FILE) == 0) {
      // Not a file: descend into the container.
      setSelection(selectionMap, (IContainer) resource, filter);
    } else if (filter.accept(resource)) {
      IContainer parent = resource.getParent();
      List files =
          selectionMap.containsKey(parent) ? (List) selectionMap.get(parent) : new ArrayList();
      files.add(resource);
      selectionMap.put(parent, files);
    }
  }
  fResourceGroup.updateSelections(selectionMap);
  updateSelectionCount();
}
private void setSelection(Map selectionMap, IContainer parent, IFilter filter) { try { IResource[] resources = parent.members(); List selections = new ArrayList(); for (int i = 0; i < resources.length; i++) { IResource resource = resources[i]; if ((resource.getType() & IResource.FILE) > 0) { if (filter.accept(resource)) selections.add(resource); } else { setSelection(selectionMap, (IContainer) resource, filter); } } if (!selections.isEmpty()) selectionMap.put(parent, selections); } catch (CoreException x) { // Just return if we can't get any info return; } }
/**
 * Creates a filter by name and immediately applies the given constraint.
 *
 * @param filterName name identifying the filter to instantiate
 * @param constraint constraint handed to the freshly created filter
 * @return the configured filter
 */
public static <BaseClass, Element> IFilter<BaseClass, Element> makeFilter(
    String filterName, Object constraint) {
  IFilter<BaseClass, Element> configured = makeFilter(filterName);
  configured.setConstraint(constraint);
  return configured;
}
/**
 * Short-circuit conjunction of the two child filters: the right filter is
 * only consulted when the left one matches.
 */
public boolean filter(Widget widget, int type, Event event, boolean send) {
  if (!left.filter(widget, type, event, send)) {
    return false;
  }
  return right.filter(widget, type, event, send);
}
/**
 * Builds a memtable column iterator, delegating to the super-column filter
 * when the query path targets a super column.
 */
public IColumnIterator getMemtableColumnIterator(ColumnFamily cf, DecoratedKey key) {
  assert cf != null;
  IFilter active = (path.superColumnName == null) ? filter : superFilter;
  return active.getMemtableColumnIterator(cf, key);
}
// TODO move gcBefore into a field public IColumnIterator getSSTableColumnIterator(SSTableReader sstable) { if (path.superColumnName == null) return filter.getSSTableColumnIterator(sstable, key); return superFilter.getSSTableColumnIterator(sstable, key); }
/**
 * Builds an SSTable column iterator over an already-open data file,
 * delegating to the super-column filter when the query path targets a super
 * column.
 */
public IColumnIterator getSSTableColumnIterator(
    SSTableReader sstable, FileDataInput file, DecoratedKey key, RowIndexEntry indexEntry) {
  IFilter active = (path.superColumnName == null) ? filter : superFilter;
  return active.getSSTableColumnIterator(sstable, file, key, indexEntry);
}
/**
 * Tests a resource against an optional filter: a null filter matches
 * everything, otherwise the filter decides.
 */
private boolean resourceMatches(final IFilter filter, IResource member) {
  if (filter == null) {
    return true;
  }
  return filter.accept(member);
}
/**
 * Removes a filter from the chain by instance search.
 *
 * @param filter the instance of the filter
 * @return the removed instance, or null if no filter with that id was registered
 */
public IFilter removeFilter(IFilter filter) {
  IFilter removed = chain.remove(filter.getID());
  return removed;
}