Example #1
  /**
   * Sets the value of a cell.
   *
   * @param connection Connection (not currently used)
   * @param members Coordinates of cell
   * @param newValue New value
   * @param currentValue Current value
   * @param allocationPolicy Allocation policy
   * @param allocationArgs Additional arguments of allocation policy
   */
  public void setCellValue(
      Connection connection,
      List<RolapMember> members,
      double newValue,
      double currentValue,
      AllocationPolicy allocationPolicy,
      Object[] allocationArgs) {
    Util.discard(connection); // for future use
    assert allocationPolicy != null;
    assert allocationArgs != null;
    switch (allocationPolicy) {
      case EQUAL_ALLOCATION:
      case EQUAL_INCREMENT:
        if (allocationArgs.length != 0) {
          throw Util.newError(
              "Allocation policy "
                  + allocationPolicy
                  + " takes 0 arguments; "
                  + allocationArgs.length
                  + " were supplied");
        }
        break;
      default:
        throw Util.newError("Allocation policy " + allocationPolicy + " is not supported");
    }

    // Compute the set of columns which are constrained by the cell's
    // coordinates.
    //
    // NOTE: This code is very similar to code in
    // RolapAggregationManager.makeCellRequest. Consider creating a
    // CellRequest then mining it. It will work better in the presence of
    // calculated members, compound members, parent-child hierarchies,
    // hierarchies whose default member is not the 'all' member, and so
    // forth.
    final RolapStoredMeasure measure = (RolapStoredMeasure) members.get(0);
    final RolapCube baseCube = measure.getCube();
    final RolapStar.Measure starMeasure = (RolapStar.Measure) measure.getStarMeasure();
    assert starMeasure != null;
    int starColumnCount = starMeasure.getStar().getColumnCount();
    final BitKey constrainedColumnsBitKey = BitKey.Factory.makeBitKey(starColumnCount);
    Object[] keyValues = new Object[starColumnCount];
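    // Walk each non-measure member (members.get(0) is the measure itself) up
    // its hierarchy, recording the star key column and key value that
    // constrain the cell at each level, stopping once a level with unique
    // members pins the coordinate down.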
    for (int i = 1; i < members.size(); i++) {
      Member member = members.get(i);
      for (RolapCubeMember m = (RolapCubeMember) member;
          m != null && !m.isAll();
          m = m.getParentMember()) {
        final RolapCubeLevel level = m.getLevel();
        RolapStar.Column column = level.getBaseStarKeyColumn(baseCube);
        if (column != null) {
          final int bitPos = column.getBitPosition();
          keyValues[bitPos] = m.getKey();
          constrainedColumnsBitKey.set(bitPos);
        }
        if (level.areMembersUnique()) {
          break;
        }
      }
    }

    // Squish the values down. We want the compactKeyValues[i] to correspond
    // to the i'th set bit in the key. This is the same format used by
    // CellRequest.
    Object[] compactKeyValues = new Object[constrainedColumnsBitKey.cardinality()];
    int k = 0;
    for (int bitPos : constrainedColumnsBitKey) {
      compactKeyValues[k++] = keyValues[bitPos];
    }

    // Record the override.
    //
    // TODO: add a mechanism for persisting the overrides to a file.
    //
    // FIXME: make thread-safe
    writebackCells.add(
        new WritebackCell(
            baseCube,
            new ArrayList<RolapMember>(members),
            constrainedColumnsBitKey,
            compactKeyValues,
            newValue,
            currentValue,
            allocationPolicy));
  }
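
A minimal sketch of how this method might be called. The ScenarioImpl type for the enclosing writeback object, the already-resolved member list, and the helper name writeCellEqually are assumptions for illustration; only the setCellValue signature and the EQUAL_ALLOCATION policy come from the example above.

import java.util.List;

// Hypothetical caller: write a new cell value, spreading the change with the
// EQUAL_ALLOCATION policy. That policy takes no extra arguments, so an empty
// Object[] is passed for allocationArgs.
void writeCellEqually(
    ScenarioImpl scenario,         // assumed enclosing class of setCellValue
    Connection connection,
    List<RolapMember> cellMembers, // measure first, then one member per constrained hierarchy
    double newValue,
    double currentValue) {
  scenario.setCellValue(
      connection,
      cellMembers,
      newValue,
      currentValue,
      AllocationPolicy.EQUAL_ALLOCATION,
      new Object[0]); // no policy-specific arguments
}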
Example #2
    public FlushResult call() throws Exception {
      // For each measure and each star, ask the index
      // which headers intersect.
      final List<SegmentHeader> headers = new ArrayList<SegmentHeader>();
      final List<Member> measures = CacheControlImpl.findMeasures(region);
      final SegmentColumn[] flushRegion = CacheControlImpl.findAxisValues(region);
      final List<RolapStar> starList = CacheControlImpl.getStarList(region);

      for (Member member : measures) {
        if (!(member instanceof RolapStoredMeasure)) {
          continue;
        }
        final RolapStoredMeasure storedMeasure = (RolapStoredMeasure) member;
        final RolapStar star = storedMeasure.getCube().getStar();
        final SegmentCacheIndex index = cacheMgr.indexRegistry.getIndex(star);
        headers.addAll(
            index.intersectRegion(
                member.getDimension().getSchema().getName(),
                ((RolapSchema) member.getDimension().getSchema()).getChecksum(),
                storedMeasure.getCube().getName(),
                storedMeasure.getName(),
                storedMeasure.getCube().getStar().getFactTable().getAlias(),
                flushRegion));
      }

      // If flushRegion is empty, this means we must clear all
      // segments for the region's measures.
      if (flushRegion.length == 0) {
        for (final SegmentHeader header : headers) {
          for (RolapStar star : starList) {
            cacheMgr.indexRegistry.getIndex(star).remove(header);
          }

          // Remove the segment from external caches. Use an
          // executor, because it may take some time; the call to
          // Util.safeGet below then waits for the task to finish.
          cacheControlImpl.trace(
              "discard segment - it cannot be constrained and maintain consistency:\n"
                  + header.getDescription());

          final Future<?> task =
              cacheMgr.cacheExecutor.submit(
                  new Runnable() {
                    public void run() {
                      try {
                        // Note that the SegmentCache API doesn't
                        // require us to verify that the segment
                        // exists (by calling "contains") before we
                        // call "remove".
                        cacheMgr.compositeCache.remove(header);
                      } catch (Throwable e) {
                        LOGGER.warn("remove header failed: " + header, e);
                      }
                    }
                  });
          Util.safeGet(task, "SegmentCacheManager.flush");
        }
        return new FlushResult(Collections.<Callable<Boolean>>emptyList());
      }

      // Now we know which headers intersect. For each of them,
      // we append an excluded region.
      //
      // TODO: Optimize the logic here. If a segment is mostly
      // empty, we should trash it completely.
      final List<Callable<Boolean>> callableList = new ArrayList<Callable<Boolean>>();
      for (final SegmentHeader header : headers) {
        if (!header.canConstrain(flushRegion)) {
          // We have to delete that segment altogether.
          cacheControlImpl.trace(
              "discard segment - it cannot be constrained and maintain consistency:\n"
                  + header.getDescription());
          for (RolapStar star : starList) {
            cacheMgr.indexRegistry.getIndex(star).remove(header);
          }
          continue;
        }
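        // The header can be constrained: build a copy that excludes the
        // flushed region, then swap it into the caches and indexes below.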
        final SegmentHeader newHeader = header.constrain(flushRegion);
        for (final SegmentCacheWorker worker : cacheMgr.segmentCacheWorkers) {
          callableList.add(
              new Callable<Boolean>() {
                public Boolean call() throws Exception {
                  boolean existed;
                  if (worker.supportsRichIndex()) {
                    final SegmentBody sb = worker.get(header);
                    existed = worker.remove(header);
                    if (sb != null) {
                      worker.put(newHeader, sb);
                    }
                  } else {
                    // The cache doesn't support rich index. We
                    // have to clear the segment entirely.
                    existed = worker.remove(header);
                  }
                  return existed;
                }
              });
        }
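        // Update each star's in-memory segment index: drop the original
        // header and register the constrained replacement in its place.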
        for (RolapStar star : starList) {
          SegmentCacheIndex index = cacheMgr.indexRegistry.getIndex(star);
          index.remove(header);
          index.add(newHeader, false, null);
        }
      }

      // Done
      return new FlushResult(callableList);
    }
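
For context, a rough sketch of how a caller might drain the Callable tasks carried by the returned FlushResult and wait for the cache workers to finish. The getTasks() accessor, the executor, and the method name runFlushTasks are assumptions for illustration; the example above only shows the result being constructed.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

// Hypothetical consumer of FlushResult. getTasks() stands in for however the
// result exposes its list of Callable<Boolean> cleanup tasks.
static void runFlushTasks(FlushResult result) throws Exception {
  ExecutorService executor = Executors.newFixedThreadPool(4);
  try {
    List<Future<Boolean>> futures = new ArrayList<Future<Boolean>>();
    for (Callable<Boolean> task : result.getTasks()) {
      futures.add(executor.submit(task));
    }
    for (Future<Boolean> future : futures) {
      future.get(); // propagate any failure from a cache worker
    }
  } finally {
    executor.shutdown();
  }
}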