/** {@inheritDoc} */ @Override protected void loadValidatedSettingsFrom(final NodeSettingsRO settings) throws InvalidSettingsException { foodFile.loadSettingsFrom(settings); outPath.loadSettingsFrom(settings); method.loadSettingsFrom(settings); }
/** {@inheritDoc} */ @Override protected BufferedDataTable[] execute( final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception { GenerateFoodSensitivitySpecificity.exec( foodFile.getStringValue(), outPath.getStringValue(), method.getStringValue().equals(SPEARMAN_METHOD), exec); return new BufferedDataTable[] {}; }
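// For context, a minimal sketch of how the three settings models used in the two methods
// above might be declared in this NodeModel. The config keys, default values and the
// assumed value of SPEARMAN_METHOD are illustrative guesses; only the names foodFile,
// outPath, method and SPEARMAN_METHOD come from the code above.
static final String SPEARMAN_METHOD = "Spearman"; // assumed constant value

private final SettingsModelString foodFile =
    new SettingsModelString("food_file", "");                        // assumed config key/default
private final SettingsModelString outPath =
    new SettingsModelString("output_path", "");                      // assumed config key/default
private final SettingsModelString method =
    new SettingsModelString("correlation_method", SPEARMAN_METHOD);  // assumed config key/default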
protected XMLWriterNodeDialog() { super(); addDialogComponent( new DialogComponentColumnNameSelection( new SettingsModelString(XMLWriterNodeModel.CFGKEY_XML_COL, "XML Data"), "XML Column", 0, true, false, new ColumnFilter() { @Override public boolean includeColumn(DataColumnSpec colSpec) { if (colSpec != null && colSpec.getType().equals(XMLCell.TYPE)) { return true; } return false; } @Override public String allFilteredMsg() { return "No XML columns available for selection!"; } })); createNewGroup("Filename conventions"); final SettingsModelBoolean sb_use_rowid = new SettingsModelBoolean(XMLWriterNodeModel.CFGKEY_USE_ROWID, true); final SettingsModelString sb_basename = new SettingsModelString(XMLWriterNodeModel.CFGKEY_BASENAME, "document"); sb_basename.setEnabled(!sb_use_rowid.getBooleanValue()); addDialogComponent( new DialogComponentString( new SettingsModelString(XMLWriterNodeModel.CFGKEY_EXTN, ".xml"), "Filename extension")); addDialogComponent(new DialogComponentBoolean(sb_use_rowid, "Use Row ID?")); addDialogComponent(new DialogComponentString(sb_basename, "Basename")); sb_use_rowid.addChangeListener( new ChangeListener() { @Override public void stateChanged(ChangeEvent arg0) { sb_basename.setEnabled(!sb_use_rowid.getBooleanValue()); } }); createNewGroup("Save XML files to folder..."); addDialogComponent( new DialogComponentString( new SettingsModelString(XMLWriterNodeModel.CFGKEY_FOLDER, "c:/temp"), "Folder to save to...")); }
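// A minimal sketch of the matching settings on the XMLWriterNodeModel side, assuming the
// usual KNIME pattern of one SettingsModel per CFGKEY referenced by the dialog above. The
// literal key strings and the m_* field names are assumptions for illustration; only the
// CFGKEY_* constant names and the dialog defaults appear in the code above.
static final String CFGKEY_XML_COL = "xml-column";    // assumed key string
static final String CFGKEY_USE_ROWID = "use-rowid";   // assumed key string
static final String CFGKEY_BASENAME = "basename";     // assumed key string
static final String CFGKEY_EXTN = "extension";        // assumed key string
static final String CFGKEY_FOLDER = "output-folder";  // assumed key string

private final SettingsModelString m_xmlCol = new SettingsModelString(CFGKEY_XML_COL, "XML Data");
private final SettingsModelBoolean m_useRowId = new SettingsModelBoolean(CFGKEY_USE_ROWID, true);
private final SettingsModelString m_basename = new SettingsModelString(CFGKEY_BASENAME, "document");
private final SettingsModelString m_extension = new SettingsModelString(CFGKEY_EXTN, ".xml");
private final SettingsModelString m_folder = new SettingsModelString(CFGKEY_FOLDER, "c:/temp");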
private UnaryOutputOperation<ImgPlus<T>, ImgPlus<V>> getSlidingOperation( final ImgPlus<T> img, final V type) { final Shape neighborhood = NeighborhoodType.getNeighborhood( NeighborhoodType.valueOf(m_neighborhoodType.getStringValue()), m_intervalExtend.getIntValue()); final OutOfBoundsFactory<T, ImgPlus<T>> outStrat = OutOfBoundsStrategyFactory.<T, ImgPlus<T>>getStrategy( m_outOfBoundsStrategy.getStringValue(), img.firstElement()); return getSlidingOperation(img, type, neighborhood, outStrat); }
private DataColumnSpec[] getListSpec() { DataColumnSpec[] listSpecs = new DataColumnSpec[8]; String columnName = "mutual info. " + reference.getStringValue() + " - " + library.getStringValue(); listSpecs[0] = new DataColumnSpecCreator("Parameter name", StringCell.TYPE).createSpec(); listSpecs[1] = new DataColumnSpecCreator(columnName, DoubleCell.TYPE).createSpec(); listSpecs[2] = new DataColumnSpecCreator("sigma", DoubleCell.TYPE).createSpec(); listSpecs[3] = new DataColumnSpecCreator("bias", DoubleCell.TYPE).createSpec(); listSpecs[4] = new DataColumnSpecCreator("N bins x", DoubleCell.TYPE).createSpec(); listSpecs[5] = new DataColumnSpecCreator("N bins y", DoubleCell.TYPE).createSpec(); listSpecs[6] = new DataColumnSpecCreator("log-base", DoubleCell.TYPE).createSpec(); listSpecs[7] = new DataColumnSpecCreator("method", StringCell.TYPE).createSpec(); return listSpecs; }
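// Usage sketch: the eight column specs built above would typically be wrapped into a
// DataTableSpec when the node is configured. This is an illustrative assumption about how
// getListSpec() is consumed, not code taken from the node itself.
@Override
protected DataTableSpec[] configure(final DataTableSpec[] inSpecs) throws InvalidSettingsException {
    // one output table whose columns are exactly the specs returned by getListSpec()
    return new DataTableSpec[] {new DataTableSpec(getListSpec())};
}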
private void addLabelingOptions() { setHorizontalPlacement(true); m_useCustomTrackPrefixModel.addChangeListener( e -> m_customTrackPrefixModel.setEnabled(m_useCustomTrackPrefixModel.getBooleanValue())); addDialogComponent( new DialogComponentBoolean( m_useCustomTrackPrefixModel, "Tracks are labeled with a custom prefix")); m_customTrackPrefixModel.setEnabled(false); addDialogComponent(new DialogComponentString(m_customTrackPrefixModel, "Prefix:")); setHorizontalPlacement(false); }
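// The two models wired together above are assumed to be plain KNIME settings models along
// these lines; the config keys and defaults are illustrative guesses, and only the field
// names and the enable/disable coupling come from the dialog code above.
private final SettingsModelBoolean m_useCustomTrackPrefixModel =
    new SettingsModelBoolean("use_custom_track_prefix", false); // assumed key/default
private final SettingsModelString m_customTrackPrefixModel =
    new SettingsModelString("custom_track_prefix", "Track: ");  // assumed key/default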
/** {@inheritDoc} */ @Override protected void validateSettings(final NodeSettingsRO settings) throws InvalidSettingsException { m_email.validateSettings(settings); m_seq_col.validateSettings(settings); m_accsn_col.validateSettings(settings); m_alignment_type.validateSettings(settings); }
/** {@inheritDoc} */ @Override protected void saveSettingsTo(final NodeSettingsWO settings) { m_email.saveSettingsTo(settings); m_seq_col.saveSettingsTo(settings); m_accsn_col.saveSettingsTo(settings); m_alignment_type.saveSettingsTo(settings); }
/** {@inheritDoc} */ @Override protected void loadValidatedSettingsFrom(final NodeSettingsRO settings) throws InvalidSettingsException { m_email.loadSettingsFrom(settings); m_seq_col.loadSettingsFrom(settings); m_accsn_col.loadSettingsFrom(settings); m_alignment_type.loadSettingsFrom(settings); }
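// For reference, a plausible declaration of the four settings handled by the three methods
// above. The cast in execute() below suggests m_seq_col and m_accsn_col are held under a
// more general SettingsModel type; the config keys and defaults here are assumptions for
// illustration only.
private final SettingsModelString m_email = new SettingsModelString("email", DEFAULT_EMAIL);
private final SettingsModel m_seq_col = new SettingsModelString("sequence_column", "Sequence");
private final SettingsModel m_accsn_col = new SettingsModelString("accession_column", "Accession");
private final SettingsModelString m_alignment_type = new SettingsModelString("alignment_type", "AL_AA");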
@Override
protected DataTableSpec[] configure(DataTableSpec[] inSpecs) throws InvalidSettingsException {
    List<File> inputFiles =
        FileSelectPanel.getInputFiles(propInputDir.getStringValue(), getAllowedFileExtensions());
    if (inputFiles.isEmpty()) {
        return new DataTableSpec[] {new DataTableSpec()};
    }

    // first group files into plate-groups
    Map<String, List<File>> plateFiles = splitFilesIntoPlates(inputFiles);
    if (plateFiles.isEmpty()) {
        throw new RuntimeException("No valid envision-files in selection " + inputFiles);
    }

    // compile the column-model
    List<Attribute> colAttributes = compileColumnModel(mergeAttributes(plateFiles));

    return new DataTableSpec[] {AttributeUtils.compileTableSpecs(colAttributes)};
}
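// A minimal sketch of what splitFilesIntoPlates() could look like, assuming the plate
// barcode can be derived from each file name. getBarcode(File) is a hypothetical helper;
// the node's real grouping logic may differ.
private Map<String, List<File>> splitFilesIntoPlates(List<File> files) {
    Map<String, List<File>> plateFiles = new TreeMap<String, List<File>>();
    for (File file : files) {
        String barcode = getBarcode(file); // hypothetical: parse the barcode from the file name
        if (!plateFiles.containsKey(barcode)) {
            plateFiles.put(barcode, new ArrayList<File>());
        }
        plateFiles.get(barcode).add(file);
    }
    return plateFiles;
}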
@Override
protected void updateComponent() {
    if (beforeLoad) {
        // This is immediately *after* load!
        newColumns.clear();
        newColumns.addAll(
            Arrays.asList(((SettingsModelStringArray) getModel()).getStringArrayValue()));
        beforeLoad = false; // Never again
    }
    final PortObjectSpec lastTableSpec = getLastTableSpec(portIndex);
    final List<DataRow> dataRows = new ArrayList<DataRow>();
    final List<DataColumnSpec> colSpecs = new ArrayList<DataColumnSpec>();
    if (lastTableSpec instanceof DataTableSpec) {
        final DataTableSpec spec = (DataTableSpec) lastTableSpec;
        try {
            final Map<List<String>, Map<String, Integer>> parts =
                UnpivotNodeModel.createParts(pattern.getStringValue(), spec);
            final Set<Integer> participating = new HashSet<Integer>();
            for (final Map<String, Integer> map : parts.values()) {
                for (final Integer i : map.values()) {
                    participating.add(i);
                }
            }
            for (int i = 0; i < spec.getNumColumns(); ++i) {
                if (!participating.contains(Integer.valueOf(i))) {
                    colSpecs.add(spec.getColumnSpec(i));
                }
            }
            singleCount = colSpecs.size();
            int rowCount = 0;
            if (parts.isEmpty() || newColumns.size() != parts.keySet().iterator().next().size()) {
                newColumns.clear();
                if (!parts.isEmpty()) {
                    // Fill with defaults
                    for (int i = parts.keySet().iterator().next().size(); i-- > 0; ) {
                        final String colName = "Col_" + i;
                        newColumns.add(colName);
                        colSpecs.add(new DataColumnSpecCreator(colName, StringCell.TYPE).createSpec());
                    }
                }
            } else {
                for (final String colName : newColumns) {
                    colSpecs.add(new DataColumnSpecCreator(colName, StringCell.TYPE).createSpec());
                }
            }
            final Map<String, DataType> types = new LinkedHashMap<String, DataType>();
            for (final Entry<List<String>, Map<String, Integer>> outer : parts.entrySet()) {
                final Map<String, Integer> map = outer.getValue();
                for (final Entry<String, Integer> entry : map.entrySet()) {
                    final String colName = entry.getKey();
                    final DataType origType = types.get(colName);
                    if (origType == null) {
                        types.put(colName, spec.getColumnSpec(entry.getValue().intValue()).getType());
                    } else {
                        types.put(
                            colName,
                            DataType.getCommonSuperType(
                                origType, spec.getColumnSpec(entry.getValue().intValue()).getType()));
                    }
                }
            }
            for (final Entry<String, DataType> entry : types.entrySet()) {
                colSpecs.add(new DataColumnSpecCreator(entry.getKey(), entry.getValue()).createSpec());
            }
            for (final Entry<List<String>, Map<String, Integer>> entry : parts.entrySet()) {
                final List<DataCell> cells = new ArrayList<DataCell>(colSpecs.size());
                for (int i = singleCount; i-- > 0; ) {
                    cells.add(DataType.getMissingCell());
                }
                for (final String val : entry.getKey()) {
                    cells.add(new StringCell(val));
                }
                for (final Entry<String, Integer> inner : entry.getValue().entrySet()) {
                    final DataColumnDomain domain =
                        spec.getColumnSpec(inner.getValue().intValue()).getDomain();
                    cells.add(
                        domain.getLowerBound() == null
                            ? domain.getValues() == null || domain.getValues().isEmpty()
                                ? DataType.getMissingCell()
                                : domain.getValues().iterator().next()
                            : domain.getLowerBound());
                }
                dataRows.add(new DefaultRow("Row" + rowCount, cells));
                ++rowCount;
            }
        } catch (final PatternSyntaxException e) {
            for (final DataColumnSpec dataColumnSpec : spec) {
                colSpecs.add(dataColumnSpec);
            }
        }
    }
    @SuppressWarnings("deprecation")
    final DataTable data =
        new org.knime.core.data.def.DefaultTable(
            dataRows.toArray(new DataRow[dataRows.size()]),
            new DataTableSpec(colSpecs.toArray(new DataColumnSpec[colSpecs.size()])));
    table.setDataTable(data);
    table.repaint();
}
@Override
protected BufferedDataTable[] execute(BufferedDataTable[] inData, ExecutionContext exec) throws Exception {
    List<File> inputFiles =
        FileSelectPanel.getInputFiles(propInputDir.getStringValue(), getAllowedFileExtensions());
    if (inputFiles.isEmpty()) {
        throw new RuntimeException("No files selected");
    }

    // first group files into plate-groups
    Map<String, List<File>> plateFiles = splitFilesIntoPlates(inputFiles);
    if (plateFiles.isEmpty()) {
        throw new RuntimeException("No valid envision-files in selection " + inputFiles);
    }

    // compile the column model
    List<String> allAttributes = mergeAttributes(plateFiles);
    List<Attribute> colAttributes = compileColumnModel(allAttributes);
    DataTableSpec outputSpec = AttributeUtils.compileTableSpecs(colAttributes);
    BufferedDataContainer container = exec.createDataContainer(outputSpec);

    // populate the table
    int fileCounter = 0, rowCounter = 0;
    for (String barcode : plateFiles.keySet()) {
        logger.info("Processing plate " + barcode);

        Plate plate = new Plate();
        // invalidate plate-dims as these become fixed in the loop
        plate.setNumColumns(-1);
        plate.setNumRows(-1);

        for (File file : plateFiles.get(barcode)) {
            String attributeName = getAttributeNameOfEnvisionFile(file);
            parseFile(plate, attributeName, file);
            BufTableUtils.updateProgress(exec, fileCounter++, inputFiles.size());
        }

        // now create the data-rows for this table
        for (Well well : plate.getWells()) {
            if (well.getReadOutNames().isEmpty()) {
                continue;
            }

            DataCell[] knimeRow = new DataCell[colAttributes.size()];

            // first add the barcode-column, then the well position
            knimeRow[0] = new StringCell(barcode);
            knimeRow[1] = colAttributes.get(1).createCell(well.getPlateRow());
            knimeRow[2] = colAttributes.get(2).createCell(well.getPlateColumn());

            for (String attributeName : allAttributes) {
                int rowIndex = allAttributes.indexOf(attributeName);
                Double value = well.getReadout(attributeName);
                if (value != null) {
                    knimeRow[3 + rowIndex] = new DoubleCell(value);
                } else {
                    knimeRow[3 + rowIndex] = DataType.getMissingCell();
                }
            }

            DataRow tableRow = new DefaultRow(new RowKey("" + rowCounter++), knimeRow);
            container.addRowToTable(tableRow);
        }
    }

    container.close();
    return new BufferedDataTable[] {container.getTable()};
}
/** {@inheritDoc} */ @Override protected void validateSettings(final NodeSettingsRO settings) throws InvalidSettingsException { foodFile.validateSettings(settings); outPath.validateSettings(settings); method.validateSettings(settings); }
@Override protected void saveSettingsTo(final NodeSettingsWO settings) { fileName.saveSettingsTo(settings); packageName.saveSettingsTo(settings); }
/** {@inheritDoc} */ @Override protected void saveSettingsTo(final NodeSettingsWO settings) { foodFile.saveSettingsTo(settings); outPath.saveSettingsTo(settings); method.saveSettingsTo(settings); }
@Override protected void loadValidatedSettingsFrom(final NodeSettingsRO settings) throws InvalidSettingsException { fileName.loadSettingsFrom(settings); packageName.loadSettingsFrom(settings); }
/** {@inheritDoc} */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec)
        throws Exception {
    if (m_email.getStringValue().equals(DEFAULT_EMAIL)) {
        throw new Exception(
            "You must set a valid E-Mail for EBI to contact you in the event of problems with the service!");
    }
    int n_rows = inData[0].getRowCount();
    int seq_idx =
        inData[0].getSpec().findColumnIndex(((SettingsModelString) m_seq_col).getStringValue());
    int accsn_idx =
        inData[0].getSpec().findColumnIndex(((SettingsModelString) m_accsn_col).getStringValue());
    if (seq_idx < 0 || accsn_idx < 0) {
        throw new Exception("Cannot find columns... valid data?");
    }
    int done = 0;

    // create the output columns (raw format for use with R)
    DataTableSpec outputSpec = new DataTableSpec(inData[0].getDataTableSpec(), make_output_spec());
    BufferedDataContainer container = exec.createDataContainer(outputSpec, false, 0);

    // instantiate MUSCLE client
    MuscleClient cli = new MuscleClient();

    // each row is a separate MUSCLE job; the sequences are in one collection cell,
    // the accessions (IDs) in the other
    RowIterator it = inData[0].iterator();
    while (it.hasNext()) {
        DataRow r = it.next();
        ListCell seqs = (ListCell) r.getCell(seq_idx);
        ListCell accsns = (ListCell) r.getCell(accsn_idx);
        if (seqs.size() != accsns.size()) {
            throw new Exception(
                "Every sequence must have a corresponding accession: error at row " + r.getKey().getString());
        }
        if (seqs.size() < 1) {
            throw new Exception("Cannot MUSCLE zero sequences: error at row " + r.getKey().getString());
        }
        if (seqs.size() > 1000) {
            throw new Exception("Too many sequences in row " + r.getKey().getString());
        }

        // dummy a fake "FASTA" file (in memory) and then submit that to MUSCLE@EBI
        // along with other necessary parameters
        StringBuffer seq_as_fasta = new StringBuffer();
        for (int i = 0; i < seqs.size(); i++) {
            seq_as_fasta.append(">");
            seq_as_fasta.append(accsns.get(i).toString());
            seq_as_fasta.append("\n");
            seq_as_fasta.append(seqs.get(i).toString());
            seq_as_fasta.append("\n");
        }
        // System.err.println(seq_as_fasta);

        // lodge the muscle job and store the results in the output table
        InputParameters ip = new InputParameters();
        ip.setSequence(seq_as_fasta.toString());

        // start the job
        String jobId = cli.runApp(m_email.getStringValue(), r.getKey().getString(), ip);
        exec.checkCanceled();
        exec.setProgress(((double) done) / n_rows, "Executing " + jobId);
        Thread.sleep(20 * 1000); // 20 seconds
        waitForCompletion(cli, exec, jobId);
        done++;

        // process results and add them into the table...
        // 1. fasta alignment data
        byte[] bytes = cli.getSrvProxy().getResult(jobId, "aln-fasta", null);
        DataCell[] cells = new DataCell[3];
        cells[0] = new StringCell(jobId);

        // compute the base64 encoded phylip aligned sequences suitable for use by R's phangorn package
        String fasta = new String(bytes);
        String ret = fasta2phylip(fasta);
        // it must be encoded (I chose base64) as it is common to both Java and R and it must be
        // encoded due to containing multiple lines, which confuses the CSV passed between KNIME and R
        String rk = r.getKey().getString();
        DataCell mac = AlignmentCellFactory.createCell(fasta, AlignmentType.AL_AA);
        if (mac instanceof MultiAlignmentCell) {
            m_muscle_map.put(rk, (MultiAlignmentCell) mac);
        }
        cells[1] = mac;

        bytes = cli.getSrvProxy().getResult(jobId, "out", null);
        cells[2] = new StringCell("<html><pre>" + new String(bytes));
        container.addRowToTable(new JoinedRow(r, new DefaultRow(r.getKey(), cells)));
    }
    container.close();
    BufferedDataTable out = container.getTable();
    return new BufferedDataTable[] {out};
}
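// A minimal sketch of the waitForCompletion() helper called above, assuming the EBI
// JDispatcher proxy exposes a getStatus(jobId) operation returning "RUNNING" while the job
// is queued or executing, and "FINISHED" on success (as the JDispatcher API documents).
// The polling interval and the exact proxy call are assumptions; the node's actual helper
// may differ.
private void waitForCompletion(MuscleClient cli, ExecutionContext exec, String jobId) throws Exception {
    String status = "RUNNING";
    while ("RUNNING".equals(status)) {
        exec.checkCanceled();                         // honour user cancellation while polling
        Thread.sleep(20 * 1000);                      // poll every 20 seconds, as in execute()
        status = cli.getSrvProxy().getStatus(jobId);  // assumed JDispatcher status call
        exec.setMessage(jobId + ": " + status);
    }
    if (!"FINISHED".equals(status)) {
        throw new Exception("MUSCLE job " + jobId + " did not complete: " + status);
    }
}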
@Override protected void validateSettings(final NodeSettingsRO settings) throws InvalidSettingsException { fileName.validateSettings(settings); packageName.validateSettings(settings); }