/** Trains a model on the ExampleSet from the input port by delegating to learn(ExampleSet). */
  @Override
  public void doWork() throws OperatorException {
    ExampleSet exampleSet = exampleSetInput.getData(ExampleSet.class);

    // some checks
    if (exampleSet.getAttributes().getLabel() == null) {
      throw new UserError(this, 105);
    }
    if (exampleSet.getAttributes().size() == 0) {
      throw new UserError(this, 106);
    }

    // check capabilities and produce errors if they are not fulfilled
    CapabilityCheck check =
        new CapabilityCheck(
            this,
            Tools.booleanValue(
                ParameterService.getParameterValue(
                    CapabilityProvider.PROPERTY_RAPIDMINER_GENERAL_CAPABILITIES_WARN),
                true));
    check.checkLearnerCapabilities(this, exampleSet);

    Model model = learn(exampleSet);

    modelOutput.deliver(model);
    exampleSetOutput.deliver(exampleSet);
  }
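  /*
   * Hedged sketch only: one way a concrete subclass might implement learn(ExampleSet), which the
   * doWork() above delegates to after the capability check. "ConstantLabelModel" is a
   * hypothetical class used purely for illustration, not part of the RapidMiner API.
   */
  public Model learn(ExampleSet exampleSet) throws OperatorException {
    Attribute label = exampleSet.getAttributes().getLabel();
    exampleSet.recalculateAllAttributeStatistics();
    double average = exampleSet.getStatistics(label, Statistics.AVERAGE);
    // hypothetical model that always predicts the label average
    return new ConstantLabelModel(exampleSet, average);
  }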
  @Override
  public void doWork() throws OperatorException {
    ExampleSet originalExampleSet = exampleSetInput.getData(ExampleSet.class);

    double fraction = getParameterAsDouble(PARAMETER_FRACTION);
    if (fraction < 0 || fraction > 1.0)
      throw new UserError(
          this,
          207,
          new Object[] {
            fraction, "fraction", "Cannot use fractions of less than 0.0 or more than 1.0"
          });
    SplittedExampleSet splitted =
        new SplittedExampleSet(
            originalExampleSet,
            fraction,
            getParameterAsInt(PARAMETER_SAMPLING_TYPE),
            getParameterAsBoolean(RandomGenerator.PARAMETER_USE_LOCAL_RANDOM_SEED),
            getParameterAsInt(RandomGenerator.PARAMETER_LOCAL_RANDOM_SEED));
    splitted.selectSingleSubset(0);

    exampleSubsetInnerSource.deliver(splitted);
    getSubprocess(0).execute();
    modelOutput.deliver(modelInnerSink.getData(IOObject.class));
  }
  /**
   * Fetches the input example set and the macro name, then iterates over the example set,
   * updating the given macro with the current iteration number.
   */
  @Override
  public void doWork() throws OperatorException {
    outExtender.reset();
    ExampleSet exampleSet = exampleSetInput.getData(ExampleSet.class);
    String iterationMacroName = getParameterAsString(PARAMETER_ITERATION_MACRO);
    boolean innerSinkIsConnected = exampleSetInnerSink.isConnected();

    for (iteration = 1; iteration <= exampleSet.size(); iteration++) {

      getProcess().getMacroHandler().addMacro(iterationMacroName, String.valueOf(iteration));

      // deliver the previous iteration's result if the inner sink is connected, otherwise a clone
      exampleSetInnerSource.deliver(
          innerSinkIsConnected ? exampleSet : (ExampleSet) exampleSet.clone());
      getSubprocess(0).execute();
      inApplyLoop();

      if (innerSinkIsConnected) {
        exampleSet = exampleSetInnerSink.getData(ExampleSet.class);
      }

      outExtender.collect();
    }

    getProcess().getMacroHandler().removeMacro(iterationMacroName);
    exampleSetOutput.deliver(exampleSet);
  }
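  /*
   * Hedged sketch: operators nested inside the subprocess can read the current iteration through
   * the macro handler (or reference it as a macro in parameter values). The macro name
   * "iteration" below is an assumption; the actual name comes from PARAMETER_ITERATION_MACRO.
   */
  private int currentIterationFromMacro() {
    return Integer.parseInt(getProcess().getMacroHandler().getMacro("iteration"));
  }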
  @Override
  public final void doWork() throws OperatorException {
    ExampleSet inputExampleSet = exampleSetInput.getData(ExampleSet.class);
    ExampleSet applySet = null;
    // check whether a copy of the original example set is needed
    if (originalOutput.isConnected() && writesIntoExistingData()) {
      int type = DataRowFactory.TYPE_DOUBLE_ARRAY;
      if (inputExampleSet.getExampleTable() instanceof MemoryExampleTable) {
        DataRowReader dataRowReader = inputExampleSet.getExampleTable().getDataRowReader();
        if (dataRowReader.hasNext()) {
          type = dataRowReader.next().getType();
        }
      }
      // check if type is supported to be copied
      if (type >= 0) {
        applySet = MaterializeDataInMemory.materializeExampleSet(inputExampleSet, type);
      }
    }

    if (applySet == null) applySet = (ExampleSet) inputExampleSet.clone();

    // we apply on the materialized data, because writing can't take place in views anyway.
    ExampleSet result = apply(applySet);
    originalOutput.deliver(inputExampleSet);
    exampleSetOutput.deliver(result);
  }
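  /*
   * Hedged sketch: a preprocessing subclass whose apply() mutates the underlying data table
   * advertises that via writesIntoExistingData(), which is what makes the doWork() above
   * materialize a copy whenever the original output port is connected. Modifier and default
   * value are assumptions.
   */
  @Override
  public boolean writesIntoExistingData() {
    // apply() writes attribute values in place instead of creating a view
    return true;
  }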
  /**
   * Displays an error bubble that alerts the user that an input port of an operator expected input
   * but did not receive any. The bubble is located at the port and the process view will change to
   * said port. This method is intended for use after an error has occurred during process
   * execution.
   *
   * @param port the port for which to display the error
   * @return the {@link PortInfoBubble} instance, never {@code null}
   */
  public static PortInfoBubble displayInputPortNoDataInformation(final Port port) {
    if (port == null) {
      throw new IllegalArgumentException("port must not be null!");
    }

    String key;
    if (port.isConnected()) {
      key = "process_mandatory_input_port_no_data";
    } else {
      // PortOwner is an interface implemented only by anonymous inner classes,
      // so check the enclosing class to differentiate operator input ports from subprocess
      // (result) input ports
      if (ExecutionUnit.class.isAssignableFrom(
          port.getPorts().getOwner().getClass().getEnclosingClass())) {
        key = "process_mandatory_input_port_no_data_unconnected_inner";
      } else {
        key = "process_mandatory_input_port_no_data_unconnected";
      }
    }
    String opName = "";
    if (port instanceof InputPort) {
      InputPort inPort = (InputPort) port;
      OutputPort source = inPort.getSource();
      if (source != null) {
        // use the operator description name, not the user-defined name, which could be arbitrarily long
        opName = source.getPorts().getOwner().getOperator().getOperatorDescription().getName();
      }
    }
    return displayMissingInputPortInformation(port, !port.isConnected(), true, key, opName);
  }
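  /*
   * Hedged usage sketch: after a process run aborts because a mandatory input port received no
   * data, a GUI component can point the user at the offending port. The wrapping method is an
   * assumption for illustration.
   */
  private static PortInfoBubble reportMissingInput(final Port offendingPort) {
    // the returned bubble stays visible at the port; callers may keep it to dismiss it later
    return displayInputPortNoDataInformation(offendingPort);
  }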
 public void transformMetaData() {
   List<Operator> sorted = topologicalSort();
   for (Operator op : sorted) {
     op.transformMetaData();
   }
   if (sorted.size() != operators.size()) {
     List<Operator> remainder = new LinkedList<Operator>(operators);
     remainder.removeAll(sorted);
     for (Operator nodeInCircle : remainder) {
       for (OutputPort outputPort : nodeInCircle.getOutputPorts().getAllPorts()) {
         InputPort destination = outputPort.getDestination();
         if ((destination != null)
             && remainder.contains(destination.getPorts().getOwner().getOperator())) {
           if (destination.getSource() != null) {
             // (source can be null *during* a disconnect in which case
             // both the source and the destination fire an update
             // which leads to this inconsistent state)
             destination.addError(new OperatorLoopError(destination));
           }
           outputPort.addError(new OperatorLoopError(outputPort));
         }
       }
     }
   }
   getInnerSinks().checkPreconditions();
 }
  @Override
  public void doWork() throws OperatorException {
    CentroidClusterModel model = modelInput.getData(CentroidClusterModel.class);

    Attributes trainAttributes = model.getTrainingHeader().getAttributes();
    String[] attributeNames = model.getAttributeNames();
    Attribute[] attributes = new Attribute[attributeNames.length + 1];
    for (int i = 0; i < attributeNames.length; i++) {
      Attribute originalAttribute = trainAttributes.get(attributeNames[i]);
      attributes[i] =
          AttributeFactory.createAttribute(attributeNames[i], originalAttribute.getValueType());
      if (originalAttribute.isNominal()) {
        attributes[i].setMapping((NominalMapping) originalAttribute.getMapping().clone());
      }
    }
    Attribute clusterAttribute = AttributeFactory.createAttribute("cluster", Ontology.NOMINAL);
    attributes[attributes.length - 1] = clusterAttribute;

    MemoryExampleTable table = new MemoryExampleTable(attributes);
    for (int i = 0; i < model.getNumberOfClusters(); i++) {
      double[] data = new double[attributeNames.length + 1];
      System.arraycopy(model.getCentroidCoordinates(i), 0, data, 0, attributeNames.length);
      data[attributeNames.length] = clusterAttribute.getMapping().mapString("cluster_" + i);
      table.addDataRow(new DoubleArrayDataRow(data));
    }

    ExampleSet resultSet = table.createExampleSet();
    resultSet.getAttributes().setSpecialAttribute(clusterAttribute, Attributes.CLUSTER_NAME);

    modelOutput.deliver(model);
    exampleSetOutput.deliver(resultSet);
  }
  @Override
  public void doWork() throws OperatorException {

    ExampleSet exampleSet = exampleSetInput.getData();

    IEntityMapping user_mapping = new EntityMapping();
    IEntityMapping item_mapping = new EntityMapping();
    IRatings training_data = new Ratings();

    if (exampleSet.getAttributes().getSpecial("user identification") == null) {
      throw new UserError(this, 105);
    }

    if (exampleSet.getAttributes().getSpecial("item identification") == null) {
      throw new UserError(this, 105);
    }

    if (exampleSet.getAttributes().getLabel() == null) {
      throw new UserError(this, 105);
    }

    Attributes attributes = exampleSet.getAttributes();
    Attribute userAttribute = attributes.getRole("user identification").getAttribute();
    Attribute itemAttribute = attributes.getRole("item identification").getAttribute();
    Attribute labelAttribute = attributes.getLabel();

    for (Example example : exampleSet) {

      int uid = user_mapping.ToInternalID((int) example.getValue(userAttribute));
      int iid = item_mapping.ToInternalID((int) example.getValue(itemAttribute));
      double rating = example.getValue(labelAttribute);
      training_data.Add(uid, iid, rating);
    }

    _slopeOne recommendAlg = new _slopeOne();

    recommendAlg.user_mapping = user_mapping;
    recommendAlg.item_mapping = item_mapping;
    recommendAlg.SetMinRating(getParameterAsInt("Min Rating"));
    recommendAlg.SetMaxRating(recommendAlg.GetMinRating() + getParameterAsInt("Range"));

    recommendAlg.SetRatings(training_data);

    recommendAlg.Train();

    exampleSetOutput.deliver(exampleSet);

    exampleSetOutput1.deliver(recommendAlg);
  }
 /**
  * Performs the same check as {@link #getSelectedFile()}, but only reports whether a file is
  * specified in the respective way (file parameter or connected file output port).
  */
 public boolean isFileSpecified() {
   if (!fileOutputPort.isConnected()) {
     return operator.isParameterSet(fileParameterName);
   } else {
     try {
       return (fileOutputPort.getData(IOObject.class) instanceof FileObject);
     } catch (OperatorException e) {
       return false;
     }
   }
 }
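 /*
  * Hedged usage sketch: a typical caller guards file access with isFileSpecified() and only then
  * resolves a human-readable target via getSelectedFileDescription() shown further below. The
  * wrapping method is an assumption for illustration.
  */
 public String describeTargetIfSpecified() throws OperatorException {
   return isFileSpecified() ? getSelectedFileDescription() : "no file specified";
 }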
 private void addReadyOutputs(LinkedList<OutputPort> readyOutputs, OutputPorts ports) {
   // add the ports in a stack-like (last-in, first-out) fashion, as in pre-5.0 versions
   // Iterator<OutputPort> i = ports.getAllPorts().iterator();
   Iterator<OutputPort> i = new LinkedList<OutputPort>(ports.getAllPorts()).descendingIterator();
   while (i.hasNext()) {
     OutputPort port = i.next();
     if (!port.isConnected() && port.shouldAutoConnect()) {
       readyOutputs.addLast(port);
     }
   }
 }
  @Override
  public void doWork() throws OperatorException {
    ExampleSet exampleSet = exampleSetInput.getData(ExampleSet.class);

    // only use numeric attributes
    Tools.onlyNumericalAttributes(exampleSet, "KernelPCA");
    Tools.onlyNonMissingValues(exampleSet, getOperatorClassName(), this);

    Attributes attributes = exampleSet.getAttributes();
    int numberOfExamples = exampleSet.size();

    // calculating means for later zero centering
    exampleSet.recalculateAllAttributeStatistics();
    double[] means = new double[exampleSet.getAttributes().size()];
    int i = 0;
    for (Attribute attribute : exampleSet.getAttributes()) {
      means[i] = exampleSet.getStatistics(attribute, Statistics.AVERAGE);
      i++;
    }

    // kernel
    Kernel kernel = Kernel.createKernel(this);

    // copying zero centered exampleValues
    ArrayList<double[]> exampleValues = new ArrayList<double[]>(numberOfExamples);
    for (Example example : exampleSet) {
      exampleValues.add(getAttributeValues(example, attributes, means));
    }

    // filling kernel matrix
    Matrix kernelMatrix = new Matrix(numberOfExamples, numberOfExamples);
    for (i = 0; i < numberOfExamples; i++) {
      for (int j = 0; j < numberOfExamples; j++) {
        kernelMatrix.set(
            i, j, kernel.calculateDistance(exampleValues.get(i), exampleValues.get(j)));
      }
    }

    // calculating eigenVectors
    EigenvalueDecomposition eig = kernelMatrix.eig();
    Model model = new KernelPCAModel(exampleSet, means, eig.getV(), exampleValues, kernel);

    if (exampleSetOutput.isConnected()) {
      exampleSetOutput.deliver(model.apply(exampleSet));
    }
    originalOutput.deliver(exampleSet);
    modelOutput.deliver(model);
  }
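  /*
   * Hedged sketch of the helper referenced above: getAttributeValues is assumed to collect the
   * example's values in attribute order and zero-center them with the precomputed means,
   * matching the "copying zero centered exampleValues" comment.
   */
  private double[] getAttributeValues(Example example, Attributes attributes, double[] means) {
    double[] values = new double[attributes.size()];
    int index = 0;
    for (Attribute attribute : attributes) {
      // subtract the attribute mean so the kernel matrix is computed on centered data
      values[index] = example.getValue(attribute) - means[index];
      index++;
    }
    return values;
  }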
  @Override
  public void doWork() throws OperatorException {
    Model model = modelInput.getData(Model.class);
    if (!(model instanceof KernelModel)) {
      throw new UserError(this, 122, "'support vector based model (kernel model)'");
    }

    PerformanceVector inputPerformance = performanceInput.getDataOrNull(PerformanceVector.class);

    PerformanceVector performance = count((KernelModel) model, inputPerformance);

    modelOutput.deliver(model);
    performanceOutput.deliver(performance);
  }
  @Override
  public void doWork() throws OperatorException {
    ExampleSet exampleSet = exampleSetInput.getData(ExampleSet.class);
    // needed for some measures
    Tools.checkAndCreateIds(exampleSet);

    DistanceMeasure measure = measureHelper.getInitializedMeasure(exampleSet);
    SimilarityMeasureObject measureObject = new SimilarityMeasureObject(measure, exampleSet);

    ObjectVisualizerService.addObjectVisualizer(measureObject, new ExampleVisualizer(exampleSet));

    similarityOutput.deliver(measureObject);
    exampleSetOutput.deliver(exampleSet);
  }
 /**
  * Returns the specified filename, or "OutputFileObject" if the file output port is connected.
  *
  * @return the value of the file parameter, or "OutputFileObject" when the port is connected
  * @throws OperatorException if the file parameter cannot be retrieved
  */
 public String getSelectedFileDescription() throws OperatorException {
   if (!fileOutputPort.isConnected()) {
     return operator.getParameterAsString(fileParameterName);
   } else {
     return "OutputFileObject";
   }
 }
 @Override
 public void transformMD() {
   for (InputPort inputPort : inputPorts) {
     MetaData metaData = inputPort.getMetaData();
     if (metaData != null) {
       if (metaData instanceof CollectionMetaData) {
         metaData = ((CollectionMetaData) metaData).getElementMetaDataRecursive();
       }
       metaData = metaData.clone();
       metaData.addToHistory(outputPort);
       outputPort.deliverMD(modifyMetaData(metaData));
       return;
     }
   }
   outputPort.deliverMD(null);
 }
  /** @see com.rapidminer.operator.OperatorChain#doWork() */
  @Override
  public void doWork() throws OperatorException {

    List<Operator> nested = this.getImmediateChildren();
    log.info("This StreamProcess has {} nested operators", nested.size());
    for (Operator op : nested) {
      log.info("  op: {}", op);

      if (op instanceof DataStreamOperator) {
        log.info("Resetting stream-operator {}", op);
        ((DataStreamOperator) op).reset();
      }
    }

    log.info("Starting some work in doWork()");
    ExampleSet exampleSet = input.getData(ExampleSet.class);
    log.info("input is an example set with {} examples", exampleSet.size());
    int i = 0;

    Iterator<Example> it = exampleSet.iterator();
    while (it.hasNext()) {
      Example example = it.next();
      log.info("Processing example {}", i);
      DataObject datum = StreamUtils.wrap(example);
      log.info("Wrapped data-object is: {}", datum);
      dataStream.deliver(datum);
      getSubprocess(0).execute();
      inApplyLoop();
      i++;
    }

    // super.doWork();
    log.info("doWork() is finished.");
  }
  @Override
  public double[] doWork(
      ExampleSet exampleSet, Attributes attributes, double[][] points, int[] weight)
      throws OperatorException {
    DistanceMeasure measure = measureHelper.getInitializedMeasure(exampleSet);
    int n = points.length;
    int k = getParameterAsInt(PARAMETER_K);
    boolean kth = getParameterAsBoolean(PARAMETER_KTH_NEIGHBOR_DISTANCE);
    boolean parallel = getParameterAsBoolean(PARAMETER_PARALLELIZE_EVALUATION_PROCESS);
    int numberOfThreads = getParameterAsInt(PARAMETER_NUMBER_OF_THREADS);
    double[] ret = {1};

    if (n > 1) {
      if (k >= n) {
        this.logWarning(
            "Setting " + KNNAnomalyDetectionOperator.PARAMETER_K + " to #Datapoints-1.");
        k = n - 1;
        // this.setParameter(KNNAnomalyDetectionOperator.PARAMETER_K, (n-1) + "");
      }
      readModel(n, k, points, weight, measure);
      KNNEvaluator evaluator =
          new KNNEvaluator(
              knnCollection, kth, measure, parallel, numberOfThreads, this, n, k, newCollection);
      ret = evaluator.evaluate();
      model = new KNNCollectionModel(exampleSet, knnCollection, measure);
      modelOutput.deliver(model);
      knnCollection = null;
    }
    return ret;
  }
  @Override
  public void doWork() throws OperatorException {

    ExampleSet exampleSet = exampleSetInput.getData();

    IPosOnlyFeedback training_data = new PosOnlyFeedback();
    IEntityMapping user_mapping = new EntityMapping();
    IEntityMapping item_mapping = new EntityMapping();

    if (exampleSet.getAttributes().getSpecial("user identification") == null) {
      throw new UserError(this, 105);
    }

    if (exampleSet.getAttributes().getSpecial("item identification") == null) {
      throw new UserError(this, 105);
    }

    Attributes attributes = exampleSet.getAttributes();
    Attribute userAttribute = attributes.getRole("user identification").getAttribute();
    Attribute itemAttribute = attributes.getRole("item identification").getAttribute();

    for (Example example : exampleSet) {

      int uid = (int) example.getValue(userAttribute);
      int iid = (int) example.getValue(itemAttribute);
      training_data.Add(user_mapping.ToInternalID(uid), item_mapping.ToInternalID(iid));
      checkForStop();
    }

    System.out.println(training_data.GetMaxItemID() + " " + training_data.GetMaxUserID());

    Random recommendAlg = new Random();
    recommendAlg.SetFeedback(training_data);
    recommendAlg.user_mapping = user_mapping;
    recommendAlg.item_mapping = item_mapping;
    recommendAlg.Train();

    exampleSetOutput.deliver(exampleSet);
    exampleSetOutput1.deliver(recommendAlg);
  }
 @Override
 public boolean shouldAutoConnect(OutputPort outputPort) {
   if (outputPort.getName().startsWith("result")) {
     return getParameterAsBoolean("keep_output");
   } else {
     return super.shouldAutoConnect(outputPort);
   }
 }
 @SuppressWarnings("deprecation")
 private void autoWire(
     CompatibilityLevel level, InputPorts inputPorts, LinkedList<OutputPort> readyOutputs)
     throws PortException {
   boolean success = false;
   do {
     Set<InputPort> complete = new HashSet<InputPort>();
     for (InputPort in : inputPorts.getAllPorts()) {
       success = false;
       if (!in.isConnected()
           && !complete.contains(in)
           && in.getPorts().getOwner().getOperator().shouldAutoConnect(in)) {
         Iterator<OutputPort> outIterator;
         // TODO: Simon: Does the same in both cases. Check again.
         if (in.simulatesStack()) {
           outIterator = readyOutputs.descendingIterator();
         } else {
           outIterator = readyOutputs.descendingIterator();
         }
         while (outIterator.hasNext()) {
           OutputPort outCandidate = outIterator.next();
           // TODO: Remove shouldAutoConnect() in later versions
           Operator owner = outCandidate.getPorts().getOwner().getOperator();
           if (owner.shouldAutoConnect(outCandidate)) {
             if (outCandidate.getMetaData() != null) {
               if (in.isInputCompatible(outCandidate.getMetaData(), level)) {
                 readyOutputs.remove(outCandidate);
                 outCandidate.connectTo(in);
                 // we cannot continue with the remaining input ports
                 // since connecting may have triggered the creation of new input ports
                 // which would result in undefined behavior and a ConcurrentModificationException
                 success = true;
                 break;
               }
             }
           }
         }
         // no port found.
         complete.add(in);
         if (success) {
           break;
         }
       }
     }
   } while (success);
 }
  private PerformanceVector getPerformance(ExampleSet exampleSet)
      throws OperatorException, MissingIOObjectException {
    innerExampleSetSource.deliver(exampleSet);

    getSubprocess(0).execute();

    return innerPerformanceSink.getData(PerformanceVector.class);
  }
  @Override
  public void doWork() throws OperatorException {
    DataTable table = null;
    if (isParameterSet(PARAMETER_LOG_NAME)) {
      String dataTableName = getParameterAsString(PARAMETER_LOG_NAME);
      table = getProcess().getDataTable(dataTableName);
    } else {
      if (getProcess().getDataTables().size() > 0) {
        table = getProcess().getDataTables().iterator().next();
        logNote("No log name was specified, using first data table found...");
      }
    }

    // check
    if (table == null) {
      throw new UserError(this, 939);
    }

    // create attributes
    List<Attribute> attributes = new ArrayList<Attribute>();
    for (int i = 0; i < table.getNumberOfColumns(); i++) {
      String name = table.getColumnName(i);
      if (table.isDate(i)) {
        attributes.add(AttributeFactory.createAttribute(name, Ontology.DATE));
      } else if (table.isDateTime(i)) {
        attributes.add(AttributeFactory.createAttribute(name, Ontology.DATE_TIME));
      } else if (table.isNumerical(i)) {
        attributes.add(AttributeFactory.createAttribute(name, Ontology.REAL));
      } else {
        attributes.add(AttributeFactory.createAttribute(name, Ontology.NOMINAL));
      }
    }

    // create table
    MemoryExampleTable exampleTable = new MemoryExampleTable(attributes);
    for (int r = 0; r < table.getNumberOfRows(); r++) {
      DataTableRow row = table.getRow(r);
      double[] data = new double[attributes.size()];
      for (int i = 0; i < table.getNumberOfColumns(); i++) {
        if (table.isDate(i) || table.isDateTime(i) || table.isNumerical(i)) {
          data[i] = row.getValue(i);
        } else {
          Attribute attribute = attributes.get(i);
          String value = table.getValueAsString(row, i);
          data[i] = attribute.getMapping().mapString(value);
        }
      }
      exampleTable.addDataRow(new DoubleArrayDataRow(data));
    }

    // create and return example set
    exampleSetOutput.deliver(exampleTable.createExampleSet());
    dummyPorts.passDataThrough();
  }
 /**
  * Sorts the operators topologically, i.e. such that operator <var>i</var> in the returned
  * ordering receives its input (via connected {@link InputPort}s) only from operators
  * <var>0..i-1</var>.
  */
 public Vector<Operator> topologicalSort() {
   final Map<Operator, Integer> originalIndices = new HashMap<Operator, Integer>();
   for (int i = 0; i < operators.size(); i++) {
     originalIndices.put(operators.get(i), i);
   }
   EdgeCounter counter = new EdgeCounter(operators);
   for (Operator child : getOperators()) {
     for (OutputPort out : child.getOutputPorts().getAllPorts()) {
       InputPort dest = out.getDestination();
       if (dest != null) {
         counter.incNumEdges(dest.getPorts().getOwner().getOperator());
       }
     }
   }
   Vector<Operator> sorted = new Vector<Operator>();
   PriorityQueue<Operator> independentOperators =
       new PriorityQueue<Operator>(
           Math.max(1, operators.size()),
           new Comparator<Operator>() {
             @Override
             public int compare(Operator o1, Operator o2) {
               return originalIndices.get(o1) - originalIndices.get(o2);
             }
           });
   independentOperators.addAll(counter.getIndependentOperators());
   while (!independentOperators.isEmpty()) {
     Operator first = independentOperators.poll();
     sorted.add(first);
     for (OutputPort out : first.getOutputPorts().getAllPorts()) {
       InputPort dest = out.getDestination();
       if (dest != null) {
         Operator destOp = dest.getPorts().getOwner().getOperator();
         if (counter.decNumEdges(destOp) == 0) {
           // independentOperators.addFirst(destOp);
           independentOperators.add(destOp);
         }
       }
     }
   }
   return sorted;
 }
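 /*
  * Hedged sketch: operators caught in a connection cycle never reach in-degree zero, so they are
  * missing from the returned list; transformMetaData() above relies on exactly that size
  * difference to flag OperatorLoopErrors. The helper name below is an assumption.
  */
 public boolean containsConnectionCycle() {
   return topologicalSort().size() != operators.size();
 }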
  @Override
  public void doWork() throws OperatorException {

    // retrieve document
    Document input = documentInput.getData(Document.class);

    // encrypt/decrypt text
    String text = transformText(configureEncryptor(), getText(input));

    // deliver transformed document
    documentOut.deliver(new Document(text));
  }
  /**
   * Moves the operators from the given execution unit into this one, keeping all connections
   * intact. TODO:
   * Test more rigorously. Do we register/unregister everything correctly?
   *
   * @return the number of ports the connections of which could not be restored
   */
  public int stealOperatorsFrom(ExecutionUnit otherUnit) {
    int failedReconnects = 0;

    // remember source and sink connections so we can reconnect them later.
    Map<String, InputPort> sourceMap = new HashMap<String, InputPort>();
    Map<String, OutputPort> sinkMap = new HashMap<String, OutputPort>();
    for (OutputPort source : otherUnit.getInnerSources().getAllPorts()) {
      if (source.isConnected()) {
        sourceMap.put(source.getName(), source.getDestination());
      }
    }
    otherUnit.getInnerSources().disconnectAll();
    for (InputPort sink : otherUnit.getInnerSinks().getAllPorts()) {
      if (sink.isConnected()) {
        sinkMap.put(sink.getName(), sink.getSource());
      }
    }
    otherUnit.getInnerSinks().disconnectAll();

    // Move operators
    Iterator<Operator> i = otherUnit.operators.iterator();
    while (i.hasNext()) {
      Operator operator = i.next();
      i.remove();
      otherUnit.unregister(operator);
      Process otherProcess = operator.getProcess();
      if (otherProcess != null) {
        operator.unregisterOperator(otherProcess);
      }
      this.operators.add(operator);
      operator.setEnclosingProcess(null);
      // operator.unregisterOperator(operator.getProcess());
      registerOperator(operator, true);
      // operator.registerOperator(this.getEnclosingOperator().getProcess());
    }

    // Rewire sources and sinks
    for (Map.Entry<String, InputPort> entry : sourceMap.entrySet()) {
      OutputPort mySource = getInnerSources().getPortByName(entry.getKey());
      if (mySource != null) {
        mySource.connectTo(entry.getValue());
      } else {
        failedReconnects++;
      }
    }
    getInnerSources().unlockPortExtenders();

    for (Map.Entry<String, OutputPort> entry : sinkMap.entrySet()) {
      InputPort mySink = getInnerSinks().getPortByName(entry.getKey());
      if (mySink != null) {
        entry.getValue().connectTo(mySink);
      } else {
        failedReconnects++;
      }
    }
    getInnerSinks().unlockPortExtenders();

    fireUpdate(this);
    return failedReconnects;
  }
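  /*
   * Hedged usage sketch: merging another subprocess into this one and reacting to connections
   * that could not be restored. The wrapping method is an assumption; getEnclosingOperator()
   * and logWarning() are used as seen elsewhere in this code.
   */
  public void absorbSubprocess(ExecutionUnit otherUnit) {
    int failedReconnects = stealOperatorsFrom(otherUnit);
    if (failedReconnects > 0) {
      // some inner source/sink names were missing in this unit, so those connections were dropped
      getEnclosingOperator().logWarning(failedReconnects + " connections could not be restored");
    }
  }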
  @Override
  public void doWork() throws OperatorException {
    IOObject data = inputPort.getData(IOObject.class);
    Annotations annotations = data.getAnnotations();
    Attribute annotationAttr =
        AttributeFactory.createAttribute(ANNOTATION_ATTRIBUTE, Ontology.POLYNOMINAL);
    Attribute valueAttr = AttributeFactory.createAttribute(VALUE_ATTRIBUTE, Ontology.POLYNOMINAL);

    MemoryExampleTable table = new MemoryExampleTable(annotationAttr, valueAttr);

    for (String annotation : annotations.getDefinedAnnotationNames()) {
      double[] rowData = new double[2];
      rowData[0] = annotationAttr.getMapping().mapString(annotation);
      rowData[1] = valueAttr.getMapping().mapString(annotations.getAnnotation(annotation));
      table.addDataRow(new DoubleArrayDataRow(rowData));
    }

    ExampleSet exampleSet = table.createExampleSet();
    exampleSet.getAttributes().setSpecialAttribute(annotationAttr, Attributes.ID_NAME);
    outputPort.deliver(data);
    annotationsOutputPort.deliver(exampleSet);
  }
  @Override
  public void doWork() throws OperatorException {

    ClusterModel ccm = clusterModelInput.getData();
    ExampleSet exampleSet = exampleSetInput.getData();

    validationOutput.deliver(getValidations(ccm, exampleSet));
  }
  @Override
  public void doWork() throws OperatorException {
    AbstractNormalizationModel model = modelInput.getData(AbstractNormalizationModel.class);

    // check how to behave if an attribute is missing in the input ExampleSet
    failOnMissingAttributes =
        getParameter(PARAMETER_MISSING_ATTRIBUTES_KEY).equals(FAIL_ON_MISSING);

    Map<String, LinearTransformation> attributeTransformations = new HashMap<>();
    for (Attribute attribute : model.getTrainingHeader().getAttributes()) {
      double b = model.computeValue(attribute, 0);
      double a = model.computeValue(attribute, 1) - b;

      attributeTransformations.put(attribute.getName(), new LinearTransformation(a, b));
    }

    modelOutput.deliver(
        new DenormalizationModel(
            model.getTrainingHeader(), attributeTransformations, model, failOnMissingAttributes));
    originalModelOutput.deliver(model);
  }
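  /*
   * Hedged sketch of the arithmetic above: the normalization is assumed to be affine,
   * f(x) = a * x + b, so probing computeValue at 0 and 1 recovers b = f(0) and a = f(1) - f(0).
   * Denormalizing a value then simply inverts that mapping. Illustration only.
   */
  private static double denormalize(double a, double b, double normalizedValue) {
    // inverse of y = a * x + b
    return (normalizedValue - b) / a;
  }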
  /** @see com.rapidminer.operator.Operator#doWork() */
  @Override
  public void doWork() throws OperatorException {
    FactEventObject object = input.getData(FactEventObject.class);

    if (drsCalibration == null) {

      File drsFile = getParameterAsFile(DRS_FILE);

      drsCalibration = new DrsCalibration();
      drsCalibration.setDrsFile(drsFile.getAbsolutePath());
      drsCalibration.setKeepData(false);
    }

    Data calibratedEvent = drsCalibration.process(object);
    output.deliver(new FactEventObject(calibratedEvent));
  }
  @Override
  public final ExampleSet apply(ExampleSet exampleSet) throws OperatorException {
    ExampleSet workingSet =
        (isSupportingAttributeRoles())
            ? getSelectedAttributes(exampleSet)
            : new NonSpecialAttributesExampleSet(getSelectedAttributes(exampleSet));

    AbstractModel model = createPreprocessingModel(workingSet);
    model.setParameter(PARAMETER_CREATE_VIEW, getParameterAsBoolean(PARAMETER_CREATE_VIEW));
    if (getExampleSetOutputPort().isConnected()) {
      model.setOperator(this);
      exampleSet = model.apply(exampleSet);
      model.setOperator(null);
    }

    modelOutput.deliver(model);
    return exampleSet;
  }