Example #1
 /**
  * Main method for testing this class.
  *
  * @param args should contain arguments to the filter: use -h for help
  */
 public static void main(String[] args) {
   try {
     if (Utils.getFlag('b', args)) {
       Filter.batchFilterFile(new AddExpression(), args);
     } else {
       Filter.filterFile(new AddExpression(), args);
     }
   } catch (Exception ex) {
     System.out.println(ex.getMessage());
   }
 }
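A minimal sketch of driving this filter programmatically instead of via the command line, assuming Weka's standard Filter.useFilter API and that AddExpression exposes setExpression (the data file and expression below are illustrative):

 import weka.core.Instances;
 import weka.core.converters.ConverterUtils.DataSource;
 import weka.filters.Filter;
 import weka.filters.unsupervised.attribute.AddExpression;

 public class AddExpressionDemo {
   public static void main(String[] args) throws Exception {
     // Load any ARFF data set (illustrative file name).
     Instances data = DataSource.read("iris.arff");
     AddExpression add = new AddExpression();
     add.setExpression("a1+a2"); // new attribute: sum of attributes 1 and 2
     add.setInputFormat(data);   // must be called before useFilter
     Instances filtered = Filter.useFilter(data, add);
     System.out.println(filtered.numAttributes()); // one more than the input
   }
 }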
Example #2
 public Document processRecord(Document record, int position) throws Exception {
   for (HashMap<String, String> map : check_fields) {
     String fieldXpath = map.get("Field Name");
     String regexp = map.get("Field Value");
     Filter.apply(record, new OpDelete(), fieldXpath, new TextMatches(regexp));
   }
   return record;
 }
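The Filter.apply / OpDelete / TextMatches API here is project-specific, so no exact usage can be given; from the two key names above, check_fields is presumably populated with maps like this hypothetical rule (the XPath and regexp are assumptions):

 // Hypothetical rule: delete every field matching the XPath whose text
 // matches the regexp. Only the two key names come from the code above.
 List<HashMap<String, String>> check_fields = new ArrayList<>();
 HashMap<String, String> rule = new HashMap<>();
 rule.put("Field Name", "//datafield[@tag='856']"); // assumed field XPath
 rule.put("Field Value", ".*example\\.org.*");      // assumed value regexp
 check_fields.add(rule);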
Example #3
 public void report(JCDiagnostic diag) {
   if (!diag.isFlagSet(JCDiagnostic.DiagnosticFlag.NON_DEFERRABLE)
       && (filter == null || filter.accepts(diag))) {
     deferred.add(diag);
   } else {
     prev.report(diag);
   }
 }
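This javac handler defers every diagnostic the filter accepts, except those flagged NON_DEFERRABLE, which are forwarded immediately to the previous handler in the chain (compare Example #9, which omits the flag check).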
Example #4
 public boolean include(Object object) {
   if (object instanceof IResource) {
     IResource resource = (IResource) object;
     IPath path =
         IResource.FILE == resource.getType()
             ? resource.getFullPath()
             : resource.getFullPath().addTrailingSeparator();
     object = path.toPortableString();
   }

   for (Iterator iter = filters.iterator(); iter.hasNext(); ) {
     Filter filter = (Filter) iter.next();
     if (filter.matches(object)) {
       return filter.inclusive();
     }
   }
   return default_;
 }
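The Filter type consumed here is project-specific; from the call sites above it is presumably shaped roughly like this (an assumption, not the actual declaration):

 // Assumed shape of the project-specific Filter, inferred from the calls above.
 interface Filter {
   boolean matches(Object object); // does this filter's pattern match the path string?
   boolean inclusive();            // on a match: true = include, false = exclude
 }

Note that the first matching filter wins: files are matched by their portable full path, and everything else gets a trailing separator so that directory patterns can anchor on it.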
Example #5
  /**
   * Sets the format of the input instances.
   *
   * @param instanceInfo an Instances object containing the input instance structure (any instances
   *     contained in the object are ignored - only the structure is required).
   * @return true if the outputFormat may be collected immediately
   * @exception Exception if the format couldn't be set successfully
   */
  public boolean setInputFormat(Instances instanceInfo) throws Exception {

    convertInfixToPostfix(m_infixExpression);

    super.setInputFormat(instanceInfo);

    Instances outputFormat = new Instances(instanceInfo, 0);
    Attribute newAttribute;
    if (m_Debug) {
      newAttribute = new Attribute(m_postFixExpVector.toString());
    } else if (m_attributeName.compareTo("expression") != 0) {
      newAttribute = new Attribute(m_attributeName);
    } else {
      newAttribute = new Attribute(m_infixExpression);
    }
    outputFormat.insertAttributeAt(newAttribute, instanceInfo.numAttributes());
    setOutputFormat(outputFormat);
    return true;
  }
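Note the naming precedence for the new attribute: in debug mode it is named after the parsed postfix expression; otherwise a user-supplied name is used if one was set (anything other than the default "expression"); failing that, the raw infix expression itself becomes the name.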
Example #6
  protected void process() {
    int i, j, len, ch, chunkLength;
    long progOff, progLen;
    float f1;

    // io
    AudioFile inF = null;
    AudioFile outF = null;
    AudioFileDescr inStream;
    AudioFileDescr outStream;
    FloatFile[] floatF = null;
    File[] tempFile = null;

    // buffers
    float[][] inBuf, outBuf;
    float[] win;
    float[] convBuf1, convBuf2;
    float[] tempFlt;

    int inChanNum, inLength, inputStep, outputStep, winSize;
    int transLen, skip, inputLen, outputLen, fltLen;
    int framesRead, framesWritten;
    float warp, a1, b0, b1, x0, x1, y0, y1, b0init;

    Param ampRef = new Param(1.0, Param.ABS_AMP); // transform reference
    Param peakGain;
    float gain = 1.0f; // gain abs amp
    float maxAmp = 0.0f;

    PathField ggOutput;

    topLevel:
    try {

      // ---- open input, output ----

      // input
      inF = AudioFile.openAsRead(new File(pr.text[PR_INPUTFILE]));
      inStream = inF.getDescr();
      inChanNum = inStream.channels;
      inLength = (int) inStream.length;
      // this helps to prevent errors from empty files!
      if ((inLength * inChanNum) < 1) throw new EOFException(ERR_EMPTY);
      // .... check running ....
      if (!threadRunning) break topLevel;

      // output
      ggOutput = (PathField) gui.getItemObj(GG_OUTPUTFILE);
      if (ggOutput == null) throw new IOException(ERR_MISSINGPROP);
      outStream = new AudioFileDescr(inStream);
      ggOutput.fillStream(outStream);
      outF = AudioFile.openAsWrite(outStream);
      // .... check running ....
      if (!threadRunning) break topLevel;

      // ---- parameter inits ----

      warp =
          Math.max(-0.98f, Math.min(0.98f, (float) (pr.para[PR_WARP].val / 100))); // DAFx2000 'b'
      f1 = (1.0f - warp) / (1.0f + warp); // DAFx2000 (25)
      winSize = 32 << pr.intg[PR_FRAMESIZE]; // DAFx2000 'N'
      j = winSize >> 1;
      transLen = (int) (f1 * winSize + 0.5f); // DAFx2000 'P' (26)
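      // choose the smallest subdivision i so that the hop size (transLen / i)
      // fits within half a window; the rounded hop becomes the input step 'L'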
      i = pr.intg[PR_OVERLAP] + 1;
      while (((float) transLen / (float) i) > j) i++;
      inputStep = (int) (((float) transLen / (float) i) + 0.5f); // DAFx2000 'L'
      fltLen = Math.max(winSize, transLen);
      // System.out.println( "inputStep "+inputStep+"; winSize "+winSize+"; transLen "+transLen
      //     +"; fltLen "+fltLen+"; warp "+warp+"; f1 "+f1 );
      win = Filter.createFullWindow(winSize, Filter.WIN_HANNING); // DAFx2000 (27)
      outputStep = inputStep;

      b0init = (float) Math.sqrt(1.0f - warp * warp);

      progOff = 0;
      progLen = (long) inLength * (2 + inChanNum); // + winSize;

      tempFlt = new float[fltLen];
      inputLen = winSize + inputStep;
      inBuf = new float[inChanNum][inputLen];
      outputLen = transLen + outputStep;
      outBuf = new float[inChanNum][outputLen];

      // normalization requires temp files
      if (pr.intg[PR_GAINTYPE] == GAIN_UNITY) {
        tempFile = new File[inChanNum];
        floatF = new FloatFile[inChanNum];
        for (ch = 0; ch < inChanNum; ch++) { // first zero them because an exception might be thrown
          tempFile[ch] = null;
          floatF[ch] = null;
        }
        for (ch = 0; ch < inChanNum; ch++) {
          tempFile[ch] = IOUtil.createTempFile();
          floatF[ch] = new FloatFile(tempFile[ch], GenericFile.MODE_OUTPUT);
        }
        progLen += (long) inLength;
      } else {
        gain = (float) ((Param.transform(pr.para[PR_GAIN], Param.ABS_AMP, ampRef, null)).val);
      }
      // .... check running ....
      if (!threadRunning) break topLevel;

      // ----==================== the real stuff ====================----

      framesRead = 0;
      framesWritten = 0;
      skip = 0;

      while (threadRunning && (framesWritten < inLength)) {

        chunkLength = Math.min(inputLen, inLength - framesRead + skip);
        // ---- read input chunk ----
        len = Math.max(0, chunkLength - skip);
        inF.readFrames(inBuf, skip, len);
        framesRead += len;
        progOff += len;
        //				off			+= len;
        // .... progress ....
        setProgression((float) progOff / (float) progLen);
        // .... check running ....
        if (!threadRunning) break topLevel;

        // zero padding
        if (chunkLength < inputLen) {
          for (ch = 0; ch < inChanNum; ch++) {
            convBuf1 = inBuf[ch];
            for (i = chunkLength; i < convBuf1.length; i++) {
              convBuf1[i] = 0.0f;
            }
          }
        }

        for (ch = 0; threadRunning && (ch < inChanNum); ch++) {
          convBuf1 = inBuf[ch];
          convBuf2 = outBuf[ch];

          for (i = 0, j = fltLen; i < winSize; i++) {
            tempFlt[--j] = convBuf1[i] * win[i];
          }
          while (j > 0) {
            tempFlt[--j] = 0.0f;
          }

          a1 = -warp; // initial allpass
          b0 = b0init;
          b1 = 0.0f;
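          // each pass j sends tempFlt through one more first-order allpass
          // stage; the pass's final output sample y1 is accumulated as warped
          // output sample j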
          for (j = 0; j < transLen; j++) {
            x1 = 0.0f;
            y1 = 0.0f;

            //						for( i = 0; i < transLen; i++ ) {		// DAFx2000 (2 resp. 3)
            for (i = 0; i < fltLen; i++) { // DAFx2000 (2 resp. 3)
              x0 = tempFlt[i];
              y0 = b0 * x0 + b1 * x1 - a1 * y1;
              tempFlt[i] = y0; // (work with double precision while computing cascades)
              y1 = y0;
              x1 = x0;
            }

            a1 = -warp; // cascaded allpasses
            b0 = -warp;
            b1 = 1.0f;

            convBuf2[j] += (float) y1;
          }
          // .... progress ....
          progOff += chunkLength - skip;
          setProgression((float) progOff / (float) progLen);
        } // for channels
        // .... check running ....
        if (!threadRunning) break topLevel;

        chunkLength = Math.min(outputStep, inLength - framesWritten);
        // ---- write output chunk ----
        if (floatF != null) {
          for (ch = 0; ch < inChanNum; ch++) {
            floatF[ch].writeFloats(outBuf[ch], 0, chunkLength);
          }
          progOff += chunkLength;
          //					off			  += len;
          framesWritten += chunkLength;
          // .... progress ....
          setProgression((float) progOff / (float) progLen);

        } else {
          for (ch = 0; ch < inChanNum; ch++) {
            Util.mult(outBuf[ch], 0, chunkLength, gain);
          }
          outF.writeFrames(outBuf, 0, chunkLength);
          progOff += chunkLength;
          //					off			  += len;
          framesWritten += chunkLength;
          // .... progress ....
          setProgression((float) progOff / (float) progLen);
        }
        // .... check running ....
        if (!threadRunning) break topLevel;

        // check max amp
        for (ch = 0; ch < inChanNum; ch++) {
          convBuf1 = outBuf[ch];
          for (i = 0; i < chunkLength; i++) {
            f1 = Math.abs(convBuf1[i]);
            if (f1 > maxAmp) {
              maxAmp = f1;
            }
          }
        }

        // overlaps
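        // slide the input window forward by inputStep (keeping winSize samples
        // of overlap) and shift the output accumulator left by outputStep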
        skip = winSize;
        for (ch = 0; ch < inChanNum; ch++) {
          System.arraycopy(inBuf[ch], inputStep, inBuf[ch], 0, winSize);
          convBuf1 = outBuf[ch];
          System.arraycopy(convBuf1, outputStep, convBuf1, 0, transLen);
          for (i = transLen; i < outputLen; ) {
            convBuf1[i++] = 0.0f;
          }
        }
      } // until framesWritten == inLength
      // .... check running ....
      if (!threadRunning) break topLevel;

      // ----==================== normalize output ====================----

      if (pr.intg[PR_GAINTYPE] == GAIN_UNITY) {
        peakGain = new Param((double) maxAmp, Param.ABS_AMP);
        gain =
            (float)
                (Param.transform(
                        pr.para[PR_GAIN],
                        Param.ABS_AMP,
                        new Param(1.0 / peakGain.val, peakGain.unit),
                        null))
                    .val;
        normalizeAudioFile(floatF, outF, inBuf, gain, 1.0f);
        maxAmp *= gain;

        for (ch = 0; ch < inChanNum; ch++) {
          floatF[ch].cleanUp();
          floatF[ch] = null;
          tempFile[ch].delete();
          tempFile[ch] = null;
        }
      }
      // .... check running ....
      if (!threadRunning) break topLevel;

      // ---- Finish ----

      outF.close();
      outF = null;
      outStream = null;
      inF.close();
      inF = null;
      inStream = null;
      inBuf = null;

      // inform about clipping/ low level
      handleClipping(maxAmp);
    } catch (IOException e1) {
      setError(e1);
    } catch (OutOfMemoryError e2) {
      inStream = null;
      outStream = null;
      inBuf = null;
      convBuf1 = null;
      convBuf2 = null;
      System.gc();

      setError(new Exception(ERR_MEMORY));
    }

    // ---- cleanup (topLevel) ----
    if (inF != null) {
      inF.cleanUp();
      inF = null;
    }
    if (outF != null) {
      outF.cleanUp();
      outF = null;
    }
    if (floatF != null) {
      for (ch = 0; ch < floatF.length; ch++) {
        if (floatF[ch] != null) {
          floatF[ch].cleanUp();
          floatF[ch] = null;
        }
        if (tempFile[ch] != null) {
          tempFile[ch].delete();
          tempFile[ch] = null;
        }
      }
    }
  } // process()
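In short, the loop implements the warped filtering from the DAFx2000 paper referenced in the comments: each windowed, time-reversed frame is run repeatedly through a first-order allpass, y0 = b0*x0 + b1*x1 - a1*y1, initialized with b0 = sqrt(1 - warp^2), b1 = 0, a1 = -warp and cascaded thereafter with b0 = -warp, b1 = 1, a1 = -warp, i.e. the allpass A(z) = (-warp + z^-1) / (1 - warp*z^-1). The final output sample of each pass yields one sample of the warped frame, frames are overlap-added at the computed hop size, and unity-gain normalization, when selected, buffers the output in temporary float files so the peak can be measured before the final scaled write.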
Example #7
  /**
   * Sets the format of the input instances.
   *
   * @param instanceInfo an Instances object containing the input instance structure (any instances
   *     contained in the object are ignored - only the structure is required).
   * @return true if the outputFormat may be collected immediately
   * @exception Exception if the format couldn't be set successfully
   */
  public boolean setInputFormat(Instances instanceInfo) throws Exception {

    super.setInputFormat(instanceInfo);
    setOutputFormat(instanceInfo);
    return true;
  }
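This variant is a pure pass-through: the output format equals the input format, and instances can flow unchanged.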
Example #8
  /**
   * Updates the config file during the upgrade process.
   *
   * @param configPath The original path to the file.
   * @param filter The filter to select entries. Only useful for modify change type.
   * @param changeType The change type which must be applied to ldif lines.
   * @param ldifLines The change record ldif lines. For ADD change type, the first line must be the
   *     dn. For DELETE change type, the first and only line must be the dn.
   * @throws IOException If an error occurs while reading or writing the file.
   * @return The number of changes applied.
   */
  static int updateConfigFile(
      final String configPath,
      final Filter filter,
      final ChangeOperationType changeType,
      final String... ldifLines)
      throws IOException {
    final File original = new File(configPath);
    final File copyConfig = File.createTempFile("copyConfig", ".tmp", original.getParentFile());

    int changeCount = 0;
    LDIFEntryReader entryReader = null;
    LDIFEntryWriter writer = null;
    try {
      final Schema schema = getUpgradeSchema();
      entryReader = new LDIFEntryReader(new FileInputStream(configPath)).setSchema(schema);

      writer = new LDIFEntryWriter(new FileOutputStream(copyConfig));
      writer.setWrapColumn(80);

      // Writes the header on the new file.
      writer.writeComment(INFO_CONFIG_FILE_HEADER.get());
      writer.setWrapColumn(0);

      boolean entryAlreadyExist = false;
      DN ldifDN = null;
      if (filter == null && (changeType == ADD || changeType == DELETE)) {
        // The first line should start with dn:
        ldifDN = DN.valueOf(ldifLines[0].replaceFirst("dn: ", ""));
      }
      final Filter f = filter != null ? filter : Filter.alwaysFalse();
      final Matcher matcher = f.matcher(schema);
      while (entryReader.hasNext()) {
        Entry entry = entryReader.readEntry();
        final DN entryDN = entry.getName();
        // Searching for the related entries
        if (changeType == MODIFY && matcher.matches(entry) == ConditionResult.TRUE) {
          try {
            final ModifyRequest mr =
                Requests.newModifyRequest(readLDIFLines(entryDN, changeType, ldifLines));
            entry = Entries.modifyEntryPermissive(entry, mr.getModifications());
            changeCount++;
            logger.debug(
                LocalizableMessage.raw("The following entry has been modified : %s", entryDN));
          } catch (Exception ex) {
            logger.error(LocalizableMessage.raw(ex.getMessage()));
          }
        }

        if (entryDN.equals(ldifDN)) {
          logger.debug(LocalizableMessage.raw("Entry %s found", entryDN));
          entryAlreadyExist = true;

          if (changeType == DELETE) {
            entry = null;
            changeCount++;
            logger.debug(
                LocalizableMessage.raw("The following entry has been deleted : %s", entryDN));
          }
        }

        if (entry != null) {
          writer.writeEntry(entry);
        }
      }

      if (changeType == ADD && !entryAlreadyExist) {
        final AddRequest ar = Requests.newAddRequest(ldifLines);
        writer.writeEntry(ar);
        logger.debug(
            LocalizableMessage.raw(
                "Entry successfully added %s in %s", ldifDN, original.getAbsolutePath()));
        changeCount++;
      }
    } catch (Exception ex) {
      throw new IOException(ex.getMessage());
    } finally {
      // The reader and writer must be closed before renaming the files;
      // otherwise this causes exceptions under Windows.
      StaticUtils.close(entryReader, writer);
    }

    try {
      // Renaming the file, overwriting previous one.
      rename(copyConfig, new File(configPath));
    } catch (IOException e) {
      logger.error(LocalizableMessage.raw(e.getMessage()));
      deleteRecursively(original);
      throw e;
    }

    return changeCount;
  }
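A hypothetical call based on the documented contract above (path and DN are illustrative):

 // Hypothetical: delete a single entry from the config file. With a null
 // filter and DELETE, the DN comes from the first (and only) ldif line.
 int changes = updateConfigFile(
     "/path/to/config.ldif",            // illustrative path
     null,
     ChangeOperationType.DELETE,
     "dn: cn=Example Entry,cn=config"); // illustrative DN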
Example #9
 public void report(JCDiagnostic diag) {
   if (filter == null || filter.accepts(diag)) deferred.add(diag);
   else prev.report(diag);
 }
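Compare Example #3, which additionally forwards NON_DEFERRABLE diagnostics to the previous handler instead of deferring them; this variant applies only the filter check.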
Example #10
  /**
   * Loads the workload trace (applying the configured filter chain) and the
   * workload statistics specified by the input parameters.
   *
   * @throws Exception if the workload or statistics cannot be loaded
   */
  private void loadWorkload() throws Exception {
    final boolean debug = LOG.isDebugEnabled();
    // Workload Trace
    if (this.params.containsKey(PARAM_WORKLOAD)) {
      assert (this.catalog_db != null) : "Missing catalog!";
      String path = new File(this.params.get(PARAM_WORKLOAD)).getAbsolutePath();

      boolean weightedTxns = this.getBooleanParam(PARAM_WORKLOAD_XACT_WEIGHTS, false);
      if (debug) LOG.debug("Use Transaction Weights in Limits: " + weightedTxns);

      // This will prune out duplicate trace records...
      if (params.containsKey(PARAM_WORKLOAD_REMOVE_DUPES)) {
        DuplicateTraceFilter filter = new DuplicateTraceFilter();
        this.workload_filter =
            (this.workload_filter != null ? filter.attach(this.workload_filter) : filter);
        if (debug) LOG.debug("Attached " + filter.debugImpl());
      }

      // TRANSACTION OFFSET
      if (params.containsKey(PARAM_WORKLOAD_XACT_OFFSET)) {
        this.workload_xact_offset = Long.parseLong(params.get(PARAM_WORKLOAD_XACT_OFFSET));
        ProcedureLimitFilter filter =
            new ProcedureLimitFilter(-1L, this.workload_xact_offset, weightedTxns);
        // Important! The offset should go in the front!
        this.workload_filter =
            (this.workload_filter != null ? filter.attach(this.workload_filter) : filter);
        if (debug) LOG.debug("Attached " + filter.debugImpl());
      }

      // BASE PARTITIONS
      if (params.containsKey(PARAM_WORKLOAD_RANDOM_PARTITIONS)
          || params.containsKey(PARAM_WORKLOAD_BASE_PARTITIONS)) {
        BasePartitionTxnFilter filter =
            new BasePartitionTxnFilter(new PartitionEstimator(catalog_db));

        // FIXED LIST
        if (params.containsKey(PARAM_WORKLOAD_BASE_PARTITIONS)) {
          for (String p_str : this.getParam(PARAM_WORKLOAD_BASE_PARTITIONS).split(",")) {
            workload_base_partitions.add(Integer.valueOf(p_str));
          } // FOR
        } else {
          // RANDOM
          double factor = this.getDoubleParam(PARAM_WORKLOAD_RANDOM_PARTITIONS);
          List<Integer> all_partitions =
              new ArrayList<Integer>(CatalogUtil.getAllPartitionIds(catalog_db));
          Collections.shuffle(all_partitions, new Random());
          workload_base_partitions.addAll(
              all_partitions.subList(0, (int) (all_partitions.size() * factor)));
        }
        filter.addPartitions(workload_base_partitions);
        this.workload_filter =
            (this.workload_filter != null ? this.workload_filter.attach(filter) : filter);
        if (debug) LOG.debug("Attached " + filter.debugImpl());
      }

      // Txn Limit
      this.workload_xact_limit = this.getLongParam(PARAM_WORKLOAD_XACT_LIMIT);
      Histogram<String> proc_histogram = null;

      // Include/exclude procedures from the traces
      if (params.containsKey(PARAM_WORKLOAD_PROC_INCLUDE)
          || params.containsKey(PARAM_WORKLOAD_PROC_EXCLUDE)) {
        Filter filter = new ProcedureNameFilter(weightedTxns);

        // INCLUDE
        String temp = params.get(PARAM_WORKLOAD_PROC_INCLUDE);
        if (temp != null && !temp.equals(ProcedureNameFilter.INCLUDE_ALL)) {

          // We can take the counts for PROC_INCLUDE and scale them
          // with the multiplier
          double multiplier = 1.0d;
          if (this.hasDoubleParam(PARAM_WORKLOAD_PROC_INCLUDE_MULTIPLIER)) {
            multiplier = this.getDoubleParam(PARAM_WORKLOAD_PROC_INCLUDE_MULTIPLIER);
            if (debug) LOG.debug("Workload Procedure Multiplier: " + multiplier);
          }

          // Default Txn Frequencies
          String procinclude = params.get(PARAM_WORKLOAD_PROC_INCLUDE);
          if (procinclude.equalsIgnoreCase("default")) {
            procinclude =
                AbstractProjectBuilder.getProjectBuilder(catalog_type)
                    .getTransactionFrequencyString();
          }

          Map<String, Integer> limits = new HashMap<String, Integer>();
          int total_unlimited = 0;
          int total = 0;
          for (String proc_name : procinclude.split(",")) {
            int limit = -1;
            // Check if there is a limit for this procedure
            if (proc_name.contains(":")) {
              String[] pieces = proc_name.split(":");
              proc_name = pieces[0];
              limit = (int) Math.round(Integer.parseInt(pieces[1]) * multiplier);
            }

            if (limit < 0) {
              if (proc_histogram == null) {
                if (debug) LOG.debug("Generating procedure histogram from workload file");
                proc_histogram = WorkloadUtil.getProcedureHistogram(new File(path));
              }
              limit = (int) proc_histogram.get(proc_name, 0);
              total_unlimited += limit;
            } else {
              total += limit;
            }
            limits.put(proc_name, limit);
          } // FOR
          // If we have a workload limit and some txns requesting unlimited
          // records, scale the limited txns up so that they fill in the "gap".
          if (this.workload_xact_limit != null && total_unlimited > 0) {
            int remaining = this.workload_xact_limit.intValue() - total - total_unlimited;
            if (remaining > 0) {
              for (Entry<String, Integer> e : limits.entrySet()) {
                double ratio = e.getValue() / (double) total;
                e.setValue((int) Math.ceil(e.getValue() + (ratio * remaining)));
              } // FOR
            }
          }

          Histogram<String> proc_multiplier_histogram = null;
          if (debug) {
            if (proc_histogram != null) LOG.debug("Full Workload Histogram:\n" + proc_histogram);
            proc_multiplier_histogram = new Histogram<String>();
          }
          total = 0;
          for (Entry<String, Integer> e : limits.entrySet()) {
            if (debug) proc_multiplier_histogram.put(e.getKey(), e.getValue());
            ((ProcedureNameFilter) filter).include(e.getKey(), e.getValue());
            total += e.getValue();
          } // FOR
          if (debug)
            LOG.debug("Multiplier Histogram [total=" + total + "]:\n" + proc_multiplier_histogram);
        }

        // EXCLUDE
        temp = params.get(PARAM_WORKLOAD_PROC_EXCLUDE);
        if (temp != null) {
          for (String proc_name : params.get(PARAM_WORKLOAD_PROC_EXCLUDE).split(",")) {
            ((ProcedureNameFilter) filter).exclude(proc_name);
          } // FOR
        }

        // Sampling!!
        if (this.getBooleanParam(PARAM_WORKLOAD_PROC_SAMPLE, false)) {
          if (debug) LOG.debug("Attaching sampling filter");
          if (proc_histogram == null)
            proc_histogram = WorkloadUtil.getProcedureHistogram(new File(path));
          Map<String, Integer> proc_includes = ((ProcedureNameFilter) filter).getProcIncludes();
          SamplingFilter sampling_filter = new SamplingFilter(proc_includes, proc_histogram);
          filter = sampling_filter;
          if (debug) LOG.debug("Workload Procedure Histogram:\n" + proc_histogram);
        }

        // Attach our new filter to the chain (or make it the head if
        // it's the first one)
        this.workload_filter =
            (this.workload_filter != null ? this.workload_filter.attach(filter) : filter);
        if (debug) LOG.debug("Attached " + filter.debugImpl());
      }

      // TRANSACTION LIMIT
      if (this.workload_xact_limit != null) {
        ProcedureLimitFilter filter =
            new ProcedureLimitFilter(this.workload_xact_limit, weightedTxns);
        this.workload_filter =
            (this.workload_filter != null ? this.workload_filter.attach(filter) : filter);
        if (debug) LOG.debug("Attached " + filter.debugImpl());
      }

      // QUERY LIMIT
      if (params.containsKey(PARAM_WORKLOAD_QUERY_LIMIT)) {
        this.workload_query_limit = Long.parseLong(params.get(PARAM_WORKLOAD_QUERY_LIMIT));
        QueryLimitFilter filter = new QueryLimitFilter(this.workload_query_limit);
        this.workload_filter =
            (this.workload_filter != null ? this.workload_filter.attach(filter) : filter);
      }

      if (this.workload_filter != null && debug)
        LOG.debug("Workload Filters: " + this.workload_filter.toString());
      this.workload = new Workload(this.catalog);
      this.workload.load(path, this.catalog_db, this.workload_filter);
      this.workload_path = new File(path).getAbsolutePath();
      if (this.workload_filter != null) this.workload_filter.reset();
    }

    // Workload Statistics
    if (this.catalog_db != null) {
      this.stats = new WorkloadStatistics(this.catalog_db);
      if (this.params.containsKey(PARAM_STATS)) {
        String path = this.params.get(PARAM_STATS);
        if (debug) LOG.debug("Loading in workload statistics from '" + path + "'");
        this.stats_path = new File(path).getAbsolutePath();
        try {
          this.stats.load(path, this.catalog_db);
        } catch (Throwable ex) {
          throw new RuntimeException("Failed to load stats file '" + this.stats_path + "'", ex);
        }
      }

      // Scaling
      if (this.params.containsKey(PARAM_STATS_SCALE_FACTOR)) {
        double scale_factor = this.getDoubleParam(PARAM_STATS_SCALE_FACTOR);
        LOG.info("Scaling TableStatistics: " + scale_factor);
        AbstractTableStatisticsGenerator generator =
            AbstractTableStatisticsGenerator.factory(
                this.catalog_db, this.catalog_type, scale_factor);
        generator.apply(this.stats);
      }
    }
  }
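Taken together, the filters are assembled in a deliberate order: duplicate-trace removal and the transaction offset are attached at the front of the chain, while the base-partition restriction, procedure include/exclude (optionally wrapped by the sampling filter), the overall transaction limit, and the query limit are appended to its tail; each stage attaches to the existing chain or becomes its head if it is the first one.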