/**
 * Looks up the {@link RolapStar.Measure} that lies behind a measure
 * member of the given cube.
 */
protected RolapStar.Measure getMeasure(String cube, String measureName) {
    final Connection connection = getFoodMartConnection();
    final boolean fail = true;
    Cube targetCube = connection.getSchema().lookupCube(cube, fail);
    Member measure =
        targetCube.getSchemaReader(null).getMemberByUniqueName(
            Util.parseIdentifier(measureName), fail);
    return RolapStar.getStarMeasure(measure);
}
/**
 * Creates a {@link CellRequest} for the given measure, constrained by
 * one (table, column, value) triple per array position. Entries whose
 * table name is null or empty are skipped.
 */
protected CellRequest createRequest(
    final String cube,
    final String measureName,
    final String[] tables,
    final String[] columns,
    final String[] values)
{
    RolapStar.Measure starMeasure = getMeasure(cube, measureName);
    CellRequest request = new CellRequest(starMeasure, false, false);
    final RolapStar star = starMeasure.getStar();
    for (int i = 0; i < tables.length; i++) {
        String table = tables[i];
        if (table != null && table.length() > 0) {
            String column = columns[i];
            String value = values[i];
            final RolapStar.Column starColumn =
                star.lookupColumn(table, column);
            request.addConstrainedColumn(
                starColumn,
                new ValueColumnPredicate(starColumn, value));
        }
    }
    return request;
}
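// A minimal usage sketch, not part of the original source: builds a cell
// request for one measure constrained by a single store_type value. The
// cube, measure, table, column and value names are illustrative FoodMart
// assumptions.
protected CellRequest exampleStoreTypeRequest() {
    return createRequest(
        "Sales",
        "[Measures].[Unit Sales]",
        new String[] {"store"},
        new String[] {"store_type"},
        new String[] {"Supermarket"});
}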
StarPredicate toPredicate(RolapStar star) {
    final RolapStar.Column[] starColumns =
        new RolapStar.Column[tables.length];
    for (int i = 0; i < tables.length; i++) {
        starColumns[i] = star.lookupColumn(tables[i], columns[i]);
    }
    // Build a disjunction (OR) over the value rows; each row becomes a
    // conjunction (AND) of single-column value predicates.
    List<StarPredicate> orPredList = new ArrayList<StarPredicate>();
    for (String[] values : valueList) {
        assert values.length == tables.length;
        List<StarPredicate> andPredList = new ArrayList<StarPredicate>();
        for (int i = 0; i < values.length; i++) {
            andPredList.add(
                new ValueColumnPredicate(starColumns[i], values[i]));
        }
        final StarPredicate predicate =
            andPredList.size() == 1
                ? andPredList.get(0)
                : new AndPredicate(andPredList);
        orPredList.add(predicate);
    }
    return orPredList.size() == 1
        ? orPredList.get(0)
        : new OrPredicate(orPredList);
}
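// Hedged illustration, not in the original: hand-builds the predicate that
// toPredicate would return for one column with two value rows, namely an
// OrPredicate over two ValueColumnPredicates (single-column rows skip the
// AndPredicate wrapper). The table and column names are assumptions.
StarPredicate exampleStoreTypePredicate(RolapStar star) {
    final RolapStar.Column column =
        star.lookupColumn("store", "store_type");
    final List<StarPredicate> disjuncts = new ArrayList<StarPredicate>();
    disjuncts.add(new ValueColumnPredicate(column, "Supermarket"));
    disjuncts.add(new ValueColumnPredicate(column, "Gourmet Supermarket"));
    return new OrPredicate(disjuncts);
}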
BitKey getBitKey(RolapStar star) {
    return star.getBitKey(tables, columns);
}
/**
 * Checks that a given sequence of cell requests results in a particular
 * SQL statement being generated.
 *
 * <p>Always clears the cache before running the requests.
 *
 * <p>Runs the requests once for each SQL pattern in the current dialect.
 * If there are multiple patterns, runs the MDX query multiple times, and
 * expects to see each SQL statement appear. If there are no patterns in
 * this dialect, the test trivially succeeds.
 *
 * @param requests Sequence of cell requests
 * @param patterns Set of patterns
 * @param negative Set to false to 'expect' a query, or true to 'forbid'
 *                 a query
 */
protected void assertRequestSql(
    CellRequest[] requests,
    SqlPattern[] patterns,
    boolean negative)
{
    final RolapStar star = requests[0].getMeasure().getStar();
    final String cubeName = requests[0].getMeasure().getCubeName();
    final RolapCube cube = lookupCube(cubeName);
    final Dialect sqlDialect = star.getSqlQueryDialect();
    Dialect.DatabaseProduct d = sqlDialect.getDatabaseProduct();
    if (d == Dialect.DatabaseProduct.UNKNOWN) {
        // If the dialect is not one in the pattern set, do not run the
        // test. We do not print any warning message.
        return;
    }

    boolean patternFound = false;
    for (SqlPattern pattern : patterns) {
        if (!pattern.hasDatabaseProduct(d)) {
            continue;
        }

        patternFound = true;

        clearCache(cube);

        String sql = pattern.getSql();
        String trigger = pattern.getTriggerSql();
        switch (d) {
        case ORACLE:
            sql = sql.replaceAll(" =as= ", " ");
            trigger = trigger.replaceAll(" =as= ", " ");
            break;
        case TERADATA:
            sql = sql.replaceAll(" =as= ", " as ");
            trigger = trigger.replaceAll(" =as= ", " as ");
            break;
        }

        // Create a dummy DataSource which will throw a 'bomb' if it is
        // asked to execute a particular SQL statement, but will otherwise
        // behave exactly the same as the current DataSource.
        RolapUtil.setHook(new TriggerHook(trigger));
        Bomb bomb;
        final Execution execution =
            new Execution(
                ((RolapConnection) getConnection()).getInternalStatement(),
                1000);
        final AggregationManager aggMgr =
            execution.getMondrianStatement()
                .getMondrianConnection()
                .getServer()
                .getAggregationManager();
        final Locus locus =
            new Locus(execution, "BatchTestCase", "BatchTestCase");
        try {
            FastBatchingCellReader fbcr =
                new FastBatchingCellReader(
                    execution, getCube(cubeName), aggMgr);
            for (CellRequest request : requests) {
                fbcr.recordCellRequest(request);
            }
            // The FBCR will presume there is a current Locus in the
            // stack, so let's create a mock one.
            Locus.push(locus);
            fbcr.loadAggregations();
            bomb = null;
        } catch (Bomb e) {
            bomb = e;
        } finally {
            RolapUtil.setHook(null);
            Locus.pop(locus);
        }
        if (negative) {
            if (bomb != null) {
                fail("forbidden query [" + sql + "] detected");
            }
        } else {
            if (bomb == null) {
                fail("expected query [" + sql + "] did not occur");
            }
            // The fail() above guarantees that bomb is non-null here.
            TestContext.assertEqualsVerbose(
                replaceQuotes(sql),
                replaceQuotes(bomb.sql));
        }
    }

    // Warn that no pattern was specified for the current dialect, if the
    // WarnIfNoPatternForDialect property names that dialect.
    if (!patternFound) {
        String warnDialect =
            MondrianProperties.instance().WarnIfNoPatternForDialect.get();
        if (warnDialect.equals(d.toString())) {
            System.out.println(
                "[No expected SQL statements found for dialect \""
                + sqlDialect.toString()
                + "\" and test not run]");
        }
    }
}
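// Hedged usage sketch, not from the original file: a hypothetical test that
// expects the single cell request above to generate a particular MySQL
// statement. The SQL text is an illustrative guess at FoodMart output, and
// the SqlPattern(DatabaseProduct, sql, triggerSql) constructor is assumed;
// here the full statement doubles as its own trigger.
public void testStoreTypeRequestSql() {
    CellRequest request = createRequest(
        "Sales",
        "[Measures].[Unit Sales]",
        new String[] {"store"},
        new String[] {"store_type"},
        new String[] {"Supermarket"});
    String mysqlSql =
        "select `store`.`store_type` as `c0`,"
        + " sum(`sales_fact_1997`.`unit_sales`) as `m0`"
        + " from `store` as `store`,"
        + " `sales_fact_1997` as `sales_fact_1997`"
        + " where `sales_fact_1997`.`store_id` = `store`.`store_id`"
        + " and `store`.`store_type` = 'Supermarket'"
        + " group by `store`.`store_type`";
    SqlPattern[] patterns = {
        new SqlPattern(Dialect.DatabaseProduct.MYSQL, mysqlSql, mysqlSql)
    };
    // negative = false: the statement is expected to occur.
    assertRequestSql(new CellRequest[] {request}, patterns, false);
}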
public PeekResponse call() {
    final RolapStar.Measure measure = request.getMeasure();
    final RolapStar star = measure.getStar();
    final RolapSchema schema = star.getSchema();
    final AggregationKey key = new AggregationKey(request);
    // Find the segment headers that could satisfy this request.
    final List<SegmentHeader> headers =
        indexRegistry.getIndex(star).locate(
            schema.getName(),
            schema.getChecksum(),
            measure.getCubeName(),
            measure.getName(),
            star.getFactTable().getAlias(),
            request.getConstrainedColumnsBitKey(),
            request.getMappedCellValues(),
            AggregationKey.getCompoundPredicateStringList(
                star, key.getCompoundPredicateList()));

    final Map<SegmentHeader, Future<SegmentBody>> headerMap =
        new HashMap<SegmentHeader, Future<SegmentBody>>();
    final Map<List, SegmentBuilder.SegmentConverter> converterMap =
        new HashMap<List, SegmentBuilder.SegmentConverter>();

    // Is there a pending segment? (A segment that has been created and
    // is loading via SQL.)
    for (final SegmentHeader header : headers) {
        final Future<SegmentBody> bodyFuture =
            indexRegistry.getIndex(star).getFuture(header);
        if (bodyFuture != null) {
            // Check whether the DataSourceChangeListener wants us to
            // clear the current segment.
            if (star.getChangeListener() != null
                && star.getChangeListener().isAggregationChanged(key))
            {
                // We can't satisfy this request, and we must clear the
                // data from our cache. We clear it from the index first,
                // then queue up a background job to remove the data from
                // all the caches.
                indexRegistry.getIndex(star).remove(header);
                Util.safeGet(
                    cacheExecutor.submit(
                        new Runnable() {
                            public void run() {
                                try {
                                    compositeCache.remove(header);
                                } catch (Throwable e) {
                                    LOGGER.warn(
                                        "remove header failed: " + header,
                                        e);
                                }
                            }
                        }),
                    "SegmentCacheManager.peek");
                // Skip this header; it can no longer satisfy the request.
                continue;
            }
            converterMap.put(
                SegmentCacheIndexImpl.makeConverterKey(header),
                getConverter(star, header));
            headerMap.put(header, bodyFuture);
        }
    }
    return new PeekResponse(headerMap, converterMap);
}