private void validateFilterPushDown(GTInfo info) { if (!hasFilterPushDown()) return; Set<TblColRef> filterColumns = Sets.newHashSet(); TupleFilter.collectColumns(filterPushDown, filterColumns); for (TblColRef col : filterColumns) { // filter columns must belong to the table info.validateColRef(col); // filter columns must be returned to satisfy upper layer evaluation (calcite) columns = columns.set(col.getColumnDesc().getZeroBasedIndex()); } // un-evaluatable filter must be removed if (!TupleFilter.isEvaluableRecursively(filterPushDown)) { Set<TblColRef> unevaluableColumns = Sets.newHashSet(); filterPushDown = GTUtil.convertFilterUnevaluatable(filterPushDown, info, unevaluableColumns); // columns in un-evaluatable filter must be returned without loss so upper layer can do final // evaluation if (hasAggregation()) { for (TblColRef col : unevaluableColumns) { aggrGroupBy = aggrGroupBy.set(col.getColumnDesc().getZeroBasedIndex()); } } } }
/**
 * Collapses an OR of EQ comparisons on one and the same column into a single
 * IN filter, e.g. {@code c = 1 OR c = 2} becomes {@code c IN (1, 2)}.
 *
 * <p>On success the original filter's children are cleared (the caller is
 * expected to substitute the returned IN filter for them).
 *
 * @param filter a filter whose children are candidates for merging
 * @return the equivalent IN filter, or {@code null} when merging is not
 *         possible (no children, a non-EQ child, a child without a column, or
 *         EQ comparisons spanning different columns) — in which case the
 *         input filter is left untouched
 */
private CompareTupleFilter mergeToInClause(TupleFilter filter) {
    List<? extends TupleFilter> children = filter.getChildren();
    // guard: with no children there is no column to build an IN clause on;
    // previously this fell through and produced an IN filter on a null column
    if (children.isEmpty()) {
        return null;
    }
    TblColRef inColumn = null;
    List<String> inValues = new LinkedList<String>();
    for (TupleFilter child : children) {
        if (child.getOperator() != FilterOperatorEnum.EQ) {
            return null;
        }
        CompareTupleFilter compFilter = (CompareTupleFilter) child;
        TblColRef column = compFilter.getColumn();
        if (inColumn == null) {
            inColumn = column;
        }
        // bail out on a column-less EQ or on EQs over mixed columns
        if (column == null || !column.equals(inColumn)) {
            return null;
        }
        inValues.addAll(compFilter.getValues());
    }

    // detach the merged children from the original filter
    children.clear();

    CompareTupleFilter inFilter = new CompareTupleFilter(FilterOperatorEnum.IN);
    inFilter.addChild(new ColumnTupleFilter(inColumn));
    inFilter.addChild(new ConstantTupleFilter(inValues));
    return inFilter;
}
/**
 * Translates a comparison on a derived column into an equivalent (or loosened)
 * filter on its host columns.
 *
 * <p>For a PK_FK derivation the mapping is 1:1, so the comparison is simply
 * rebased onto the single host column. For a LOOKUP derivation the lookup
 * table is scanned for rows whose derived value satisfies the comparison;
 * their PK values become an IN filter on the host columns, or — when more
 * than {@code IN_THRESHOLD} records match — a loosened range filter.
 *
 * @param lookup   the lookup table backing the derived column
 * @param hostInfo how the derived column maps back to host columns
 * @param compf    the comparison on the derived column
 * @return the translated filter, paired with {@code true} if it is loosened
 *         (upper layer must re-evaluate) or {@code false} if it is exact
 */
public static Pair<TupleFilter, Boolean> translate(
    LookupStringTable lookup, DeriveInfo hostInfo, CompareTupleFilter compf) {
  TblColRef derivedCol = compf.getColumn();
  TblColRef[] hostCols = hostInfo.columns;
  TblColRef[] pkCols = hostInfo.dimension.getJoin().getPrimaryKeyColumns();

  if (hostInfo.type == DeriveType.PK_FK) {
    // 1:1 mapping — rebase the same comparison onto the host column
    assert hostCols.length == 1;
    CompareTupleFilter rebased = new CompareTupleFilter(compf.getOperator());
    rebased.addChild(new ColumnTupleFilter(hostCols[0]));
    rebased.addChild(new ConstantTupleFilter(compf.getValues()));
    return new Pair<TupleFilter, Boolean>(rebased, false);
  }

  assert hostInfo.type == DeriveType.LOOKUP;
  assert hostCols.length == pkCols.length;

  // indices of the derived column and the PK columns within a lookup row
  int derivedIdx = derivedCol.getColumn().getZeroBasedIndex();
  int[] pkIdx = new int[pkCols.length];
  for (int i = 0; i < pkCols.length; i++) {
    pkIdx[i] = pkCols[i].getColumn().getZeroBasedIndex();
  }

  // scan the lookup table, collecting the host (PK) records whose derived
  // value satisfies the original comparison
  Set<Array<String>> satisfyingHostRecords = Sets.newHashSet();
  SingleColumnTuple probe = new SingleColumnTuple(derivedCol);
  for (String[] row : lookup.getAllRows()) {
    probe.value = row[derivedIdx];
    if (compf.evaluate(probe)) {
      collect(row, pkIdx, satisfyingHostRecords);
    }
  }

  // too many matching records -> fall back to a loosened range filter rather
  // than a huge IN clause
  boolean loosened = satisfyingHostRecords.size() > IN_THRESHOLD;
  TupleFilter translated =
      loosened
          ? buildRangeFilter(hostCols, satisfyingHostRecords)
          : buildInFilter(hostCols, satisfyingHostRecords);
  return new Pair<TupleFilter, Boolean>(translated, loosened);
}
private void init(Collection<ColumnValueRange> andDimensionRanges) { int size = andDimensionRanges.size(); Map<TblColRef, String> startValues = Maps.newHashMapWithExpectedSize(size); Map<TblColRef, String> stopValues = Maps.newHashMapWithExpectedSize(size); Map<TblColRef, Set<String>> fuzzyValues = Maps.newHashMapWithExpectedSize(size); for (ColumnValueRange dimRange : andDimensionRanges) { TblColRef column = dimRange.getColumn(); startValues.put(column, dimRange.getBeginValue()); stopValues.put(column, dimRange.getEndValue()); fuzzyValues.put(column, dimRange.getEqualValues()); TblColRef partitionDateColumnRef = cubeSeg.getCubeDesc().getModel().getPartitionDesc().getPartitionDateColumnRef(); if (column.equals(partitionDateColumnRef)) { initPartitionRange(dimRange); } } AbstractRowKeyEncoder encoder = AbstractRowKeyEncoder.createInstance(cubeSeg, cuboid); encoder.setBlankByte(RowConstants.ROWKEY_LOWER_BYTE); this.startKey = encoder.encode(startValues); encoder.setBlankByte(RowConstants.ROWKEY_UPPER_BYTE); // In order to make stopRow inclusive add a trailing 0 byte. #See // Scan.setStopRow(byte [] stopRow) this.stopKey = Bytes.add(encoder.encode(stopValues), ZERO_TAIL_BYTES); // restore encoder defaults for later reuse (note // AbstractRowKeyEncoder.createInstance() caches instances) encoder.setBlankByte(AbstractRowKeyEncoder.DEFAULT_BLANK_BYTE); // always fuzzy match cuboid ID to lock on the selected cuboid this.fuzzyKeys = buildFuzzyKeys(fuzzyValues); }