Example #1
  @Override
  public void loadValueFromXML(Element xmlElement) {

    PeakList[] currentDataPeakLists =
        MZmineCore.getProjectManager().getCurrentProject().getPeakLists();

    PeakListsSelectionType selectionType;
    final String attrValue = xmlElement.getAttribute("type");

    if (Strings.isNullOrEmpty(attrValue))
      selectionType = PeakListsSelectionType.GUI_SELECTED_PEAKLISTS;
    else selectionType = PeakListsSelectionType.valueOf(attrValue);

    ArrayList<PeakList> newValues = new ArrayList<PeakList>();

    NodeList items = xmlElement.getElementsByTagName("specific_peak_list");
    for (int i = 0; i < items.getLength(); i++) {
      String itemString = items.item(i).getTextContent();
      for (PeakList df : currentDataPeakLists) {
        if (df.getName().equals(itemString)) newValues.add(df);
      }
    }
    PeakList specificPeakLists[] = newValues.toArray(new PeakList[0]);

    String namePattern = null;
    items = xmlElement.getElementsByTagName("name_pattern");
    for (int i = 0; i < items.getLength(); i++) {
      namePattern = items.item(i).getTextContent();
    }

    this.value = new PeakListsSelection();
    this.value.setSelectionType(selectionType);
    this.value.setSpecificPeakLists(specificPeakLists);
    this.value.setNamePattern(namePattern);
  }
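
Taken by itself, loadValueFromXML is a plain DOM walk: read one attribute with a fallback, then collect the text content of each matching child element. Below is a self-contained sketch of that pattern using only the JDK parser; the XML snippet, class name, and type strings are made up for illustration and are not part of MZmine.

// Minimal sketch of the DOM read pattern above, using only the JDK.
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

public class SelectionXmlReadDemo {
  public static void main(String[] args) throws Exception {
    // Illustrative input mirroring the element/attribute names used above
    String xml = "<parameter type=\"SPECIFIC_PEAKLISTS\">"
        + "<specific_peak_list>Sample A</specific_peak_list>"
        + "<specific_peak_list>Sample B</specific_peak_list>"
        + "<name_pattern>Sample*</name_pattern>"
        + "</parameter>";

    Element root = DocumentBuilderFactory.newInstance().newDocumentBuilder()
        .parse(new InputSource(new StringReader(xml))).getDocumentElement();

    // Attribute with a fallback, as in loadValueFromXML()
    String type = root.getAttribute("type");
    if (type.isEmpty()) type = "GUI_SELECTED_PEAKLISTS";

    // Collect the text of every <specific_peak_list> child
    List<String> names = new ArrayList<>();
    NodeList items = root.getElementsByTagName("specific_peak_list");
    for (int i = 0; i < items.getLength(); i++)
      names.add(items.item(i).getTextContent());

    System.out.println(type + " " + names);
  }
}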
Example #2
  /** Returns a peak list containing only the peaks whose intensity is above the parameter "intensity" */
  PeakList getIntensityThresholdPeakList(double intensity) {
    PeakList selectedPeakList = (PeakList) peakListSelector.getSelectedItem();
    if (selectedPeakList == null) return null;
    SimplePeakList newList =
        new SimplePeakList(selectedPeakList.getName(), selectedPeakList.getRawDataFiles());

    for (PeakListRow peakRow : selectedPeakList.getRows()) {
      Feature peak = peakRow.getPeak(dataFile);
      if (peak == null) continue;
      if (peak.getRawDataPointsIntensityRange().upperEndpoint() > intensity) {
        newList.addRow(peakRow);
      }
    }
    return newList;
  }
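
Stripped of the MZmine types, this is a keep-what-passes-a-threshold filter. A small stream-based sketch of the same idea on plain Java collections; Row is an invented stand-in for PeakListRow, so this illustrates only the filter, not the MZmine API.

// Illustrative threshold filter on plain collections (Row is a stand-in type).
import java.util.List;
import java.util.stream.Collectors;

public class ThresholdDemo {
  record Row(String name, double maxIntensity) {}

  static List<Row> aboveThreshold(List<Row> rows, double intensity) {
    return rows.stream()
        .filter(r -> r.maxIntensity() > intensity) // strictly above, as in the method above
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    List<Row> rows = List.of(new Row("a", 1e4), new Row("b", 5e6), new Row("c", 2e5));
    System.out.println(aboveThreshold(rows, 1e5)); // keeps b and c
  }
}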
Example #3
  @Override
  public void saveValueToXML(Element xmlElement) {
    if (value == null) return;
    Document parentDocument = xmlElement.getOwnerDocument();
    xmlElement.setAttribute("type", value.getSelectionType().name());

    if (value.getSpecificPeakLists() != null) {
      for (PeakList item : value.getSpecificPeakLists()) {
        Element newElement = parentDocument.createElement("specific_peak_list");
        newElement.setTextContent(item.getName());
        xmlElement.appendChild(newElement);
      }
    }

    if (value.getNamePattern() != null) {
      Element newElement = parentDocument.createElement("name_pattern");
      newElement.setTextContent(value.getNamePattern());
      xmlElement.appendChild(newElement);
    }
  }
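
saveValueToXML is the mirror image of Example #1: create child elements, fill their text content, and append them to the given element. The following stand-alone sketch shows that write path with the JDK DOM classes; the class name, sample values, and the Transformer-based printing are illustrative choices, not part of MZmine.

// Companion sketch to Example #1: building the same structure with the JDK DOM API.
import java.io.StringWriter;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

public class SelectionXmlWriteDemo {
  public static void main(String[] args) throws Exception {
    Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
    Element root = doc.createElement("parameter");
    doc.appendChild(root);

    // Attribute plus one child element per item, as in saveValueToXML()
    root.setAttribute("type", "SPECIFIC_PEAKLISTS");
    for (String name : new String[] {"Sample A", "Sample B"}) {
      Element item = doc.createElement("specific_peak_list");
      item.setTextContent(name);
      root.appendChild(item);
    }

    // Print the resulting document
    StringWriter out = new StringWriter();
    TransformerFactory.newInstance().newTransformer()
        .transform(new DOMSource(doc), new StreamResult(out));
    System.out.println(out);
  }
}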
Example #4
  /** @see java.awt.event.MouseListener#mouseReleased(java.awt.event.MouseEvent) */
  public void mouseReleased(MouseEvent e) {

    if (columnBeingResized == null) return;

    ColumnSettingParameter<CommonColumnType> csPar =
        parameters.getParameter(PeakListTableParameters.commonColumns);

    ColumnSettingParameter<DataFileColumnType> dfPar =
        parameters.getParameter(PeakListTableParameters.dataFileColumns);

    final int modelIndex = columnBeingResized.getModelIndex();
    final int newWidth = columnBeingResized.getPreferredWidth();

    final int numOfCommonColumns = CommonColumnType.values().length;
    final int numOfDataFileColumns = DataFileColumnType.values().length;

    if (modelIndex < numOfCommonColumns) {
      csPar.setColumnWidth(modelIndex, newWidth);
    } else {
      int dataFileColumnIndex = (modelIndex - numOfCommonColumns) % numOfDataFileColumns;
      dfPar.setColumnWidth(dataFileColumnIndex, newWidth);

      // set same width to other data file columns of this type
      for (int dataFileIndex = peakList.getNumberOfRawDataFiles() - 1;
          dataFileIndex >= 0;
          dataFileIndex--) {
        int columnIndex =
            numOfCommonColumns + (dataFileIndex * numOfDataFileColumns) + dataFileColumnIndex;

        TableColumn col = this.getColumnByModelIndex(columnIndex);

        int currentWidth = col.getPreferredWidth();

        if (currentWidth != newWidth) {
          col.setPreferredWidth(newWidth);
        }
      }
    }
  }
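
The width propagation above depends on a flat column layout: all common columns come first, followed by one fixed-size block of data-file columns per raw data file. A minimal sketch of that index arithmetic in both directions; the column counts are arbitrary stand-ins, not the real enum sizes.

// Sketch of the model-index arithmetic used in mouseReleased() and createColumns().
public class ColumnIndexDemo {
  public static void main(String[] args) {
    int numCommon = 5;   // stand-in for CommonColumnType.values().length
    int numPerFile = 10; // stand-in for DataFileColumnType.values().length

    int fileIndex = 2;
    int columnType = 7;

    // Forward mapping, as in createColumns()
    int modelIndex = numCommon + fileIndex * numPerFile + columnType;

    // Inverse mapping, as in mouseReleased()
    int recoveredType = (modelIndex - numCommon) % numPerFile;
    int recoveredFile = (modelIndex - numCommon) / numPerFile;

    System.out.printf("model=%d type=%d file=%d%n", modelIndex, recoveredType, recoveredFile);
  }
}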
Example #5
  /** Returns a peak list with the top-intensity peaks, limited in number by the parameter "threshold" */
  PeakList getTopThresholdPeakList(int threshold) {

    PeakList selectedPeakList = (PeakList) peakListSelector.getSelectedItem();
    if (selectedPeakList == null) return null;
    SimplePeakList newList =
        new SimplePeakList(selectedPeakList.getName(), selectedPeakList.getRawDataFiles());

    Vector<PeakListRow> peakRows = new Vector<PeakListRow>();

    Range<Double> mzRange = selectedPeakList.getRowsMZRange();
    Range<Double> rtRange = selectedPeakList.getRowsRTRange();

    PeakThresholdMode selectedPeakOption = (PeakThresholdMode) thresholdCombo.getSelectedItem();
    if (selectedPeakOption == PeakThresholdMode.TOP_PEAKS_AREA) {
      XYPlot xyPlot = masterFrame.getPlot().getXYPlot();
      org.jfree.data.Range yAxis = xyPlot.getRangeAxis().getRange();
      org.jfree.data.Range xAxis = xyPlot.getDomainAxis().getRange();
      rtRange = Range.closed(xAxis.getLowerBound(), xAxis.getUpperBound());
      mzRange = Range.closed(yAxis.getLowerBound(), yAxis.getUpperBound());
    }

    for (PeakListRow peakRow : selectedPeakList.getRows()) {
      if (mzRange.contains(peakRow.getAverageMZ()) && rtRange.contains(peakRow.getAverageRT())) {
        peakRows.add(peakRow);
      }
    }

    Collections.sort(
        peakRows, new PeakListRowSorter(SortingProperty.Intensity, SortingDirection.Descending));

    if (threshold > peakRows.size()) threshold = peakRows.size();
    for (int i = 0; i < threshold; i++) {
      newList.addRow(peakRows.elementAt(i));
    }
    return newList;
  }
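
Aside from the optional restriction to the visible m/z and RT ranges, the selection above is sort-descending-then-truncate. An equivalent stream-based sketch; Row and the sample values are invented, and a plain Comparator stands in for MZmine's PeakListRowSorter.

// Illustrative top-N selection equivalent to the sort-and-truncate loop above.
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class TopNDemo {
  record Row(String name, double intensity) {}

  static List<Row> topN(List<Row> rows, int threshold) {
    return rows.stream()
        .sorted(Comparator.comparingDouble(Row::intensity).reversed())
        .limit(threshold) // limit() already copes with threshold > rows.size()
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    List<Row> rows = List.of(new Row("a", 3.0), new Row("b", 9.0), new Row("c", 5.0));
    System.out.println(topN(rows, 2)); // b, c
  }
}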
Example #6
  /** @see Runnable#run() */
  public void run() {

    if ((mzWeight == 0) && (rtWeight == 0)) {
      setStatus(TaskStatus.ERROR);
      errorMessage = "Cannot run alignment, all the weight parameters are zero";
      return;
    }

    setStatus(TaskStatus.PROCESSING);
    logger.info("Running join aligner");

    // Remember how many rows we need to process. Each row will be processed
    // twice, first for score calculation, second for actual alignment.
    for (int i = 0; i < peakLists.length; i++) {
      totalRows += peakLists[i].getNumberOfRows() * 2;
    }

    // Collect all data files
    Vector<RawDataFile> allDataFiles = new Vector<RawDataFile>();
    for (PeakList peakList : peakLists) {

      for (RawDataFile dataFile : peakList.getRawDataFiles()) {

        // Each data file can only have one column in aligned peak list
        if (allDataFiles.contains(dataFile)) {
          setStatus(TaskStatus.ERROR);
          errorMessage =
              "Cannot run alignment, because file "
                  + dataFile
                  + " is present in multiple peak lists";
          return;
        }

        allDataFiles.add(dataFile);
      }
    }

    // Create a new aligned peak list
    alignedPeakList = new SimplePeakList(peakListName, allDataFiles.toArray(new RawDataFile[0]));

    // Iterate source peak lists
    for (PeakList peakList : peakLists) {

      // Create a sorted set of row-vs-row matching scores
      TreeSet<RowVsRowScore> scoreSet = new TreeSet<RowVsRowScore>();

      PeakListRow allRows[] = peakList.getRows();

      // Calculate scores for all possible alignments of each row
      for (PeakListRow row : allRows) {

        if (isCanceled()) return;

        // Calculate the m/z and RT limits within which this row can be aligned
        Range mzRange = mzTolerance.getToleranceRange(row.getAverageMZ());
        Range rtRange = rtTolerance.getToleranceRange(row.getAverageRT());

        // Get all rows of the aligned peak list within the tolerance limits
        PeakListRow candidateRows[] = alignedPeakList.getRowsInsideScanAndMZRange(rtRange, mzRange);

        // Calculate scores and store them
        for (PeakListRow candidate : candidateRows) {

          if (sameChargeRequired) {
            if (!PeakUtils.compareChargeState(row, candidate)) continue;
          }

          if (sameIDRequired) {
            if (!PeakUtils.compareIdentities(row, candidate)) continue;
          }

          if (compareIsotopePattern) {
            IsotopePattern ip1 = row.getBestIsotopePattern();
            IsotopePattern ip2 = candidate.getBestIsotopePattern();

            if ((ip1 != null) && (ip2 != null)) {
              ParameterSet isotopeParams =
                  parameters
                      .getParameter(JoinAlignerParameters.compareIsotopePattern)
                      .getEmbeddedParameters();

              if (!IsotopePatternScoreCalculator.checkMatch(ip1, ip2, isotopeParams)) {
                continue;
              }
            }
          }

          RowVsRowScore score =
              new RowVsRowScore(
                  row, candidate, mzRange.getSize() / 2, mzWeight, rtRange.getSize() / 2, rtWeight);

          scoreSet.add(score);
        }

        processedRows++;
      }

      // Create a table of mappings for best scores
      Hashtable<PeakListRow, PeakListRow> alignmentMapping =
          new Hashtable<PeakListRow, PeakListRow>();

      // Iterate scores by descending order
      Iterator<RowVsRowScore> scoreIterator = scoreSet.iterator();
      while (scoreIterator.hasNext()) {

        RowVsRowScore score = scoreIterator.next();

        // Check if the row is already mapped
        if (alignmentMapping.containsKey(score.getPeakListRow())) continue;

        // Check if the aligned row is already filled
        if (alignmentMapping.containsValue(score.getAlignedRow())) continue;

        alignmentMapping.put(score.getPeakListRow(), score.getAlignedRow());
      }

      // Align all rows using mapping
      for (PeakListRow row : allRows) {

        PeakListRow targetRow = alignmentMapping.get(row);

        // If we have no mapping for this row, add a new one
        if (targetRow == null) {
          targetRow = new SimplePeakListRow(newRowID);
          newRowID++;
          alignedPeakList.addRow(targetRow);
        }

        // Add all peaks from the original row to the aligned row
        for (RawDataFile file : row.getRawDataFiles()) {
          targetRow.addPeak(file, row.getPeak(file));
        }

        // Copy identities and other row properties from the original row to the
        // aligned row
        PeakUtils.copyPeakListRowProperties(row, targetRow);

        processedRows++;
      }
    } // Next peak list

    // Add new aligned peak list to the project
    MZmineProject currentProject = MZmineCore.getCurrentProject();
    currentProject.addPeakList(alignedPeakList);

    // Add task description to peakList
    alignedPeakList.addDescriptionOfAppliedTask(
        new SimplePeakListAppliedMethod("Join aligner", parameters));

    logger.info("Finished join aligner");
    setStatus(TaskStatus.FINISHED);
  }
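
The mapping step in the middle of run() is a greedy one-to-one matching: candidate pairs are visited from best score to worst, and a pair is accepted only if neither its source row nor its aligned row has already been claimed. A self-contained sketch of just that step; Pair and the sample scores are invented for illustration, standing in for MZmine's RowVsRowScore ordering.

// Stand-alone sketch of the greedy best-score assignment used by the join aligner.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GreedyMatchDemo {
  record Pair(String row, String target, double score) {}

  static Map<String, String> match(List<Pair> pairs) {
    pairs.sort((a, b) -> Double.compare(b.score(), a.score())); // best score first
    Map<String, String> mapping = new HashMap<>();
    for (Pair p : pairs) {
      if (mapping.containsKey(p.row())) continue;      // row already mapped
      if (mapping.containsValue(p.target())) continue; // target already taken
      mapping.put(p.row(), p.target());
    }
    return mapping;
  }

  public static void main(String[] args) {
    List<Pair> pairs = new ArrayList<>(List.of(
        new Pair("r1", "t1", 0.9), new Pair("r2", "t1", 0.8), new Pair("r2", "t2", 0.4)));
    System.out.println(match(pairs)); // r1 -> t1, r2 -> t2
  }
}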
Example #7
  /**
   * Filter the peak list.
   *
   * @param peakList peak list to filter.
   * @return a new peak list with entries of the original peak list that pass the filtering.
   */
  private PeakList filterPeakList(final PeakList peakList) {

    // Make a copy of the peakList
    final PeakList newPeakList =
        new SimplePeakList(
            peakList.getName()
                + ' '
                + parameters.getParameter(PeakFilterParameters.SUFFIX).getValue(),
            peakList.getRawDataFiles());

    // Get parameters - which filters are active
    final boolean filterByDuration =
        parameters.getParameter(PeakFilterParameters.PEAK_DURATION).getValue();
    final boolean filterByArea = parameters.getParameter(PeakFilterParameters.PEAK_AREA).getValue();
    final boolean filterByHeight =
        parameters.getParameter(PeakFilterParameters.PEAK_HEIGHT).getValue();
    final boolean filterByDatapoints =
        parameters.getParameter(PeakFilterParameters.PEAK_DATAPOINTS).getValue();
    final boolean filterByFWHM = parameters.getParameter(PeakFilterParameters.PEAK_FWHM).getValue();
    final boolean filterByTailingFactor =
        parameters.getParameter(PeakFilterParameters.PEAK_TAILINGFACTOR).getValue();
    final boolean filterByAsymmetryFactor =
        parameters.getParameter(PeakFilterParameters.PEAK_ASYMMETRYFACTOR).getValue();

    // Loop through all rows in peak list
    final PeakListRow[] rows = peakList.getRows();
    totalRows = rows.length;
    for (processedRows = 0; !isCanceled() && processedRows < totalRows; processedRows++) {
      final PeakListRow row = rows[processedRows];
      final RawDataFile[] rawdatafiles = row.getRawDataFiles();
      int totalRawDataFiles = rawdatafiles.length;
      boolean[] keepPeak = new boolean[totalRawDataFiles];

      for (int i = 0; i < totalRawDataFiles; i++) {
        // Peak values
        keepPeak[i] = true;
        final Feature peak = row.getPeak(rawdatafiles[i]);
        final double peakDuration =
            peak.getRawDataPointsRTRange().upperEndpoint()
                - peak.getRawDataPointsRTRange().lowerEndpoint();
        final double peakArea = peak.getArea();
        final double peakHeight = peak.getHeight();
        final int peakDatapoints = peak.getScanNumbers().length;

        Double peakFWHM = peak.getFWHM();
        Double peakTailingFactor = peak.getTailingFactor();
        Double peakAsymmetryFactor = peak.getAsymmetryFactor();
        if (peakFWHM == null) {
          peakFWHM = -1.0;
        }
        if (peakTailingFactor == null) {
          peakTailingFactor = -1.0;
        }
        if (peakAsymmetryFactor == null) {
          peakAsymmetryFactor = -1.0;
        }

        // Check Duration
        if (filterByDuration) {
          final Range<Double> durationRange =
              parameters
                  .getParameter(PeakFilterParameters.PEAK_DURATION)
                  .getEmbeddedParameter()
                  .getValue();
          if (!durationRange.contains(peakDuration)) {
            // Mark peak to be removed
            keepPeak[i] = false;
          }
        }

        // Check Area
        if (filterByArea) {
          final Range<Double> areaRange =
              parameters
                  .getParameter(PeakFilterParameters.PEAK_AREA)
                  .getEmbeddedParameter()
                  .getValue();
          if (!areaRange.contains(peakArea)) {
            // Mark peak to be removed
            keepPeak[i] = false;
          }
        }

        // Check Height
        if (filterByHeight) {
          final Range<Double> heightRange =
              parameters
                  .getParameter(PeakFilterParameters.PEAK_HEIGHT)
                  .getEmbeddedParameter()
                  .getValue();
          if (!heightRange.contains(peakHeight)) {
            // Mark peak to be removed
            keepPeak[i] = false;
          }
        }

        // Check # Data Points
        if (filterByDatapoints) {
          final Range<Integer> datapointsRange =
              parameters
                  .getParameter(PeakFilterParameters.PEAK_DATAPOINTS)
                  .getEmbeddedParameter()
                  .getValue();
          if (!datapointsRange.contains(peakDatapoints)) {
            // Mark peak to be removed
            keepPeak[i] = false;
          }
        }

        // Check FWHM
        if (filterByFWHM) {
          final Range<Double> fwhmRange =
              parameters
                  .getParameter(PeakFilterParameters.PEAK_FWHM)
                  .getEmbeddedParameter()
                  .getValue();
          if (!fwhmRange.contains(peakFWHM)) {
            // Mark peak to be removed
            keepPeak[i] = false;
          }
        }

        // Check Tailing Factor
        if (filterByTailingFactor) {
          final Range<Double> tailingRange =
              parameters
                  .getParameter(PeakFilterParameters.PEAK_TAILINGFACTOR)
                  .getEmbeddedParameter()
                  .getValue();
          if (!tailingRange.contains(peakTailingFactor)) {
            // Mark peak to be removed
            keepPeak[i] = false;
          }
        }

        // Check Asymmetry Factor
        if (filterByAsymmetryFactor) {
          final Range<Double> asymmetryRange =
              parameters
                  .getParameter(PeakFilterParameters.PEAK_ASYMMETRYFACTOR)
                  .getEmbeddedParameter()
                  .getValue();
          if (!asymmetryRange.contains(peakAsymmetryFactor)) {
            // Mark peak to be removed
            keepPeak[i] = false;
          }
        }
      }

      newPeakList.addRow(copyPeakRow(row, keepPeak));
    }

    return newPeakList;
  }
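
Each block above repeats the same test: when a filter is enabled and the peak's value falls outside the configured range, the peak is marked for removal. A compact sketch of that check, assuming Guava's Range is on the classpath (as it already is in the code above); the helper name and the numbers are illustrative.

// Sketch of the per-peak range check, reduced to one reusable helper.
import com.google.common.collect.Range;

public class RangeFilterDemo {

  /** Returns true when the filter is disabled or the value falls inside the allowed range. */
  static boolean passes(boolean enabled, Range<Double> allowed, double value) {
    return !enabled || allowed.contains(value);
  }

  public static void main(String[] args) {
    Range<Double> durationRange = Range.closed(0.05, 1.5); // minutes, arbitrary
    double peakDuration = 2.3;

    boolean keep = passes(true, durationRange, peakDuration)
        && passes(false, Range.closed(0.0, 1.0), 99.0); // a disabled filter never rejects
    System.out.println("keep peak: " + keep); // false: duration outside range
  }
}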
  public void createColumns() {

    // clear column groups
    ColumnGroup groups[] = header.getColumnGroups();
    if (groups != null) {
      for (ColumnGroup group : groups) {
        header.removeColumnGroup(group);
      }
    }

    // clear the column model
    while (getColumnCount() > 0) {
      TableColumn col = getColumn(0);
      removeColumn(col);
    }

    // create the "average" group
    ColumnGroup averageGroup = new ColumnGroup("Average");
    header.addColumnGroup(averageGroup);

    JTextField editorField = new JTextField();
    editorField.setFont(editFont);
    DefaultCellEditor defaultEditor = new DefaultCellEditor(editorField);

    ColumnSettingParameter<CommonColumnType> csPar =
        parameters.getParameter(PeakListTableParameters.commonColumns);
    CommonColumnType visibleCommonColumns[] = csPar.getValue();

    // This is a workaround for a bug - we need to always show the ID, m/z
    // and RT columns, otherwise manual editing of peak identities does not
    // work.
    ArrayList<CommonColumnType> commonColumnsList =
        new ArrayList<>(Arrays.asList(visibleCommonColumns));
    commonColumnsList.remove(CommonColumnType.ROWID);
    commonColumnsList.remove(CommonColumnType.AVERAGEMZ);
    commonColumnsList.remove(CommonColumnType.AVERAGERT);
    commonColumnsList.add(0, CommonColumnType.ROWID);
    commonColumnsList.add(1, CommonColumnType.AVERAGEMZ);
    commonColumnsList.add(2, CommonColumnType.AVERAGERT);

    visibleCommonColumns = commonColumnsList.toArray(visibleCommonColumns);

    ColumnSettingParameter<DataFileColumnType> dfPar =
        parameters.getParameter(PeakListTableParameters.dataFileColumns);
    DataFileColumnType visibleDataFileColumns[] = dfPar.getValue();

    for (int i = 0; i < visibleCommonColumns.length; i++) {

      CommonColumnType commonColumn = visibleCommonColumns[i];
      int modelIndex = Arrays.asList(CommonColumnType.values()).indexOf(commonColumn);

      TableColumn newColumn = new TableColumn(modelIndex);
      newColumn.setHeaderValue(commonColumn.getColumnName());
      newColumn.setIdentifier(commonColumn);

      switch (commonColumn) {
        case AVERAGEMZ:
          newColumn.setCellRenderer(mzRenderer);
          break;
        case AVERAGERT:
          newColumn.setCellRenderer(rtRenderer);
          break;
        case IDENTITY:
          newColumn.setCellRenderer(identityRenderer);
          break;
        case COMMENT:
          newColumn.setCellRenderer(defaultRendererLeft);
          newColumn.setCellEditor(defaultEditor);
          break;
        case PEAKSHAPE:
          newColumn.setCellRenderer(peakShapeRenderer);
          break;
        default:
          newColumn.setCellRenderer(defaultRenderer);
      }

      this.addColumn(newColumn);
      newColumn.setPreferredWidth(csPar.getColumnWidth(modelIndex));
      if ((commonColumn == CommonColumnType.AVERAGEMZ)
          || (commonColumn == CommonColumnType.AVERAGERT)) {
        averageGroup.add(newColumn);
      }
    }

    for (int i = 0; i < peakList.getNumberOfRawDataFiles(); i++) {

      RawDataFile dataFile = peakList.getRawDataFile(i);
      ColumnGroup fileGroup = new ColumnGroup(dataFile.getName());
      header.addColumnGroup(fileGroup);

      for (int j = 0; j < visibleDataFileColumns.length; j++) {

        DataFileColumnType dataFileColumn = visibleDataFileColumns[j];
        int dataFileColumnIndex =
            Arrays.asList(DataFileColumnType.values()).indexOf(dataFileColumn);
        int modelIndex =
            CommonColumnType.values().length
                + (i * DataFileColumnType.values().length)
                + dataFileColumnIndex;

        TableColumn newColumn = new TableColumn(modelIndex);
        newColumn.setHeaderValue(dataFileColumn.getColumnName());
        newColumn.setIdentifier(dataFileColumn);

        switch (dataFileColumn) {
          case MZ:
            newColumn.setCellRenderer(mzRenderer);
            break;
          case PEAKSHAPE:
            newColumn.setCellRenderer(peakShapeRenderer);
            break;
          case STATUS:
            newColumn.setCellRenderer(peakStatusRenderer);
            break;
          case RT:
            newColumn.setCellRenderer(rtRenderer);
            break;
          case RT_START:
            newColumn.setCellRenderer(rtRenderer);
            break;
          case RT_END:
            newColumn.setCellRenderer(rtRenderer);
            break;
          case DURATION:
            newColumn.setCellRenderer(rtRenderer);
            break;
          case HEIGHT:
            newColumn.setCellRenderer(intensityRenderer);
            break;
          case AREA:
            newColumn.setCellRenderer(intensityRenderer);
            break;
          default:
            newColumn.setCellRenderer(defaultRenderer);
            break;
        }

        this.addColumn(newColumn);
        newColumn.setPreferredWidth(dfPar.getColumnWidth(dataFileColumnIndex));
        fileGroup.add(newColumn);
      }
    }
  }