/**
   * Gets the input data and the macro name, then iterates over the example set, updating the
   * given macro with the current iteration index.
   */
  @Override
  public void doWork() throws OperatorException {
    outExtender.reset();
    ExampleSet exampleSet = exampleSetInput.getData(ExampleSet.class);
    String iterationMacroName = getParameterAsString(PARAMETER_ITERATION_MACRO);
    boolean innerSinkIsConnected = exampleSetInnerSink.isConnected();

    for (iteration = 1; iteration <= exampleSet.size(); iteration++) {

      getProcess().getMacroHandler().addMacro(iterationMacroName, String.valueOf(iteration));

      // pass in a clone or, if the inner sink is connected, the result from the previous iteration
      exampleSetInnerSource.deliver(
          innerSinkIsConnected ? exampleSet : (ExampleSet) exampleSet.clone());
      getSubprocess(0).execute();
      inApplyLoop();

      if (innerSinkIsConnected) {
        exampleSet = exampleSetInnerSink.getData(ExampleSet.class);
      }

      outExtender.collect();
    }

    getProcess().getMacroHandler().removeMacro(iterationMacroName);
    exampleSetOutput.deliver(exampleSet);
  }
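  // A minimal sketch (an assumption, not part of the source) of how the iteration macro parameter
  // read above might be declared. PARAMETER_ITERATION_MACRO comes from the snippet; the default
  // value "iteration" and the description text are assumptions.
  // Requires: java.util.List, com.rapidminer.parameter.ParameterType,
  // com.rapidminer.parameter.ParameterTypeString
  @Override
  public List<ParameterType> getParameterTypes() {
    List<ParameterType> types = super.getParameterTypes();
    types.add(
        new ParameterTypeString(
            PARAMETER_ITERATION_MACRO,
            "The name of the macro holding the index of the current example.",
            "iteration"));
    return types;
  }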
  /**
   * Displays an error bubble alerting the user that an input port of an operator expected input
   * but did not receive any. The bubble is positioned at the port and the process view is scrolled
   * to it. This method is intended to be used after an error has occurred during process execution.
   *
   * @param port the port for which to display the error
   * @return the {@link PortInfoBubble} instance, never {@code null}
   */
  public static PortInfoBubble displayInputPortNoDataInformation(final Port port) {
    if (port == null) {
      throw new IllegalArgumentException("port must not be null!");
    }

    String key;
    if (port.isConnected()) {
      key = "process_mandatory_input_port_no_data";
    } else {
      // PortOwner is an interface implemented only by anonymous inner classes,
      // so check the enclosing class to differentiate between operator input ports
      // and subprocess (result) input ports
      if (ExecutionUnit.class.isAssignableFrom(
          port.getPorts().getOwner().getClass().getEnclosingClass())) {
        key = "process_mandatory_input_port_no_data_unconnected_inner";
      } else {
        key = "process_mandatory_input_port_no_data_unconnected";
      }
    }
    String opName = "";
    if (port instanceof InputPort) {
      InputPort inPort = (InputPort) port;
      OutputPort source = inPort.getSource();
      if (source != null) {
        // use the operator description name, not the user-entered name, which could be arbitrarily long
        opName = source.getPorts().getOwner().getOperator().getOperatorDescription().getName();
      }
    }
    return displayMissingInputPortInformation(port, !port.isConnected(), true, key, opName);
  }
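  // A hypothetical call site (the helper name is an assumption, not part of the source): after
  // process execution fails because a mandatory input port received no data, point the user at
  // the offending port.
  private static void reportMissingInput(final Port offendingPort) {
    if (offendingPort != null) {
      // shows the bubble and scrolls the process view to the port
      displayInputPortNoDataInformation(offendingPort);
    }
  }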
 public void transformMetaData() {
   List<Operator> sorted = topologicalSort();
   for (Operator op : sorted) {
     op.transformMetaData();
   }
   if (sorted.size() != operators.size()) {
     List<Operator> remainder = new LinkedList<Operator>(operators);
     remainder.removeAll(sorted);
     for (Operator nodeInCircle : remainder) {
       for (OutputPort outputPort : nodeInCircle.getOutputPorts().getAllPorts()) {
         InputPort destination = outputPort.getDestination();
         if ((destination != null)
             && remainder.contains(destination.getPorts().getOwner().getOperator())) {
           if (destination.getSource() != null) {
             // (source can be null *during* a disconnect in which case
             // both the source and the destination fire an update
             // which leads to this inconsistent state)
             destination.addError(new OperatorLoopError(destination));
           }
           outputPort.addError(new OperatorLoopError(outputPort));
         }
       }
     }
   }
   getInnerSinks().checkPreconditions();
 }
  /**
   * Moves the operators from the given execution unit into this one, keeping all connections
   * intact. TODO: Test more rigorously. Do we register/unregister everything correctly?
   *
   * @return the number of ports whose connections could not be restored
   */
  public int stealOperatorsFrom(ExecutionUnit otherUnit) {
    int failedReconnects = 0;

    // remember source and sink connections so we can reconnect them later.
    Map<String, InputPort> sourceMap = new HashMap<String, InputPort>();
    Map<String, OutputPort> sinkMap = new HashMap<String, OutputPort>();
    for (OutputPort source : otherUnit.getInnerSources().getAllPorts()) {
      if (source.isConnected()) {
        sourceMap.put(source.getName(), source.getDestination());
      }
    }
    otherUnit.getInnerSources().disconnectAll();
    for (InputPort sink : otherUnit.getInnerSinks().getAllPorts()) {
      if (sink.isConnected()) {
        sinkMap.put(sink.getName(), sink.getSource());
      }
    }
    otherUnit.getInnerSinks().disconnectAll();

    // Move operators
    Iterator<Operator> i = otherUnit.operators.iterator();
    while (i.hasNext()) {
      Operator operator = i.next();
      i.remove();
      otherUnit.unregister(operator);
      Process otherProcess = operator.getProcess();
      if (otherProcess != null) {
        operator.unregisterOperator(otherProcess);
      }
      this.operators.add(operator);
      operator.setEnclosingProcess(null);
      // operator.unregisterOperator(operator.getProcess());
      registerOperator(operator, true);
      // operator.registerOperator(this.getEnclosingOperator().getProcess());
    }

    // Rewire sources and sinks
    for (Map.Entry<String, InputPort> entry : sourceMap.entrySet()) {
      OutputPort mySource = getInnerSources().getPortByName(entry.getKey());
      if (mySource != null) {
        mySource.connectTo(entry.getValue());
      } else {
        failedReconnects++;
      }
    }
    getInnerSources().unlockPortExtenders();

    for (Map.Entry<String, OutputPort> entry : sinkMap.entrySet()) {
      InputPort mySink = getInnerSinks().getPortByName(entry.getKey());
      if (mySink != null) {
        entry.getValue().connectTo(mySink);
      } else {
        failedReconnects++;
      }
    }
    getInnerSinks().unlockPortExtenders();

    fireUpdate(this);
    return failedReconnects;
  }
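  // A hedged usage sketch (the helper name is an assumption): merge a donor subprocess into this
  // one and log how many connections could not be re-established.
  public void absorbSubprocess(ExecutionUnit donor) {
    int failed = stealOperatorsFrom(donor);
    if (failed > 0) {
      getEnclosingOperator()
          .getLogger()
          .warning(failed + " connection(s) could not be restored while merging subprocesses.");
    }
  }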
 @Override
 protected void performAdditionalChecks() {
   super.performAdditionalChecks();
   try {
     InputPort referenceDataPort = getInputPorts().getPortByName(REFERENCE_DATA_PORT_NAME);
     String orderMode = getParameterAsString(PARAMETER_ORDER_MODE);
     if (orderMode.equals(REFERENCE_DATA) && !referenceDataPort.isConnected()) {
       addError(
           new SimpleMetaDataError(
               Severity.ERROR, referenceDataPort, "input_missing", REFERENCE_DATA_PORT_NAME));
     }
     if (!orderMode.equals(REFERENCE_DATA) && referenceDataPort.isConnected()) {
       addError(
           new SimpleMetaDataError(
               Severity.WARNING,
               referenceDataPort,
               "port_connected_but_parameter_not_set",
               REFERENCE_DATA_PORT_NAME,
               PARAMETER_ORDER_MODE,
               orderMode));
     }
   } catch (UndefinedParameterError e) {
     // nothing to do here
   }
 }
  public void readModel(int n, int k, double[][] points, int[] weight, DistanceMeasure measure)
      throws OperatorException {
    if (modelInput.isConnected()) {
      KNNCollectionModel input;
      input = modelInput.getData(KNNCollectionModel.class);
      knnCollection = input.get();
      newCollection = false;
      if (k > knnCollection.getK()
          || !Arrays.deepEquals(knnCollection.getPoints(), points)
          || !measure.getClass().toString().equals(input.measure.getClass().toString())) {
        if (k > knnCollection.getK()) {
          this.logNote("Model at input port can not be used (k too small).");
        } else {
          this.logNote("Model at input port can not be used (Model andExampleSet not matching).");
        }
        knnCollection = new KNNCollection(n, k, points, weight);
        newCollection = true;

      } else {
        this.logNote(" Model at input port used for speeding up the operator.");
      }
      if (k < knnCollection.getK()) {
        knnCollection = KNNCollection.clone(knnCollection);
        knnCollection.shrink(knnCollection.getK() - k);
      }
    } else {
      knnCollection = new KNNCollection(n, k, points, weight);
      newCollection = true;
    }
  }
  @Override
  public void doWork() throws OperatorException {
    ExampleSet originalExampleSet = exampleSetInput.getData(ExampleSet.class);

    double fraction = getParameterAsDouble(PARAMETER_FRACTION);
    if (fraction < 0 || fraction > 1.0)
      throw new UserError(
          this,
          207,
          new Object[] {
            fraction, "fraction", "Cannot use fractions of less than 0.0 or more than 1.0"
          });
    SplittedExampleSet splitted =
        new SplittedExampleSet(
            originalExampleSet,
            fraction,
            getParameterAsInt(PARAMETER_SAMPLING_TYPE),
            getParameterAsBoolean(RandomGenerator.PARAMETER_USE_LOCAL_RANDOM_SEED),
            getParameterAsInt(RandomGenerator.PARAMETER_LOCAL_RANDOM_SEED));
    splitted.selectSingleSubset(0);

    exampleSubsetInnerSource.deliver(splitted);
    getSubprocess(0).execute();
    modelOutput.deliver(modelInnerSink.getData(IOObject.class));
  }
  @Override
  public void doWork() throws OperatorException {
    ClusterModel ccm = clusterModelInput.getData();
    ExampleSet exampleSet = exampleSetInput.getData();

    validationOutput.deliver(getValidations(ccm, exampleSet));
  }
 @Override
 public void doWork() throws OperatorException {
   AlignmentAnalyzerParametersImpl params;
   try {
     params = parseParameters();
   } catch (IOException e) {
     throw new OperatorException(e.getMessage());
   }
   PluginContext context =
       ProMPluginContextManager.instance()
           .getFutureResultAwareContext(AlignmentAPNAnalyzerPlugin.class);
   XSEventStream stream = getStreamPort().getData(XSEventStreamIOObject.class).getArtifact();
   XLogArray arr = new XLogArray();
   for (InputPort i : referenceLogsPort.getManagedPorts()) {
     try {
       arr.add(i.getData(XLogIOObject.class).getArtifact());
      } catch (UserError e) {
        // port delivered no data; skip this reference log
      }
   }
   // TODO: use meta-data to process classifiers!
   List<XEventClassifier> classifiers = fetchClassifiers(arr);
   params.setClassifier(classifiers.get(0));
   List<XSEventStreamToAcceptingPetriNetReader> algos =
       new ArrayList<XSEventStreamToAcceptingPetriNetReader>();
   for (InputPort i : getAlgorithmsPort().getManagedPorts()) {
     try {
       algos.add(
           (XSEventStreamToAcceptingPetriNetReader)
               i.getData(XSEventStreamToAcceptingPetriNetReaderIOObject.class).getArtifact());
      } catch (UserError e) {
        // port delivered no data; skip this algorithm
      }
   }
   XSStreamAnalyzer<
           XSEvent,
           Map<XSReader<XSEvent, AcceptingPetriNet>, Map<Long, Iterable<Iterable<Double>>>>,
           AcceptingPetriNet>
       analyzer =
           AlignmentAPNAnalyzerPlugin.run(
               context,
               stream,
               arr,
               params,
               algos.toArray(new XSEventStreamToAcceptingPetriNetReader[algos.size()]));
   getAnalyzerPort()
       .deliver(
           new XSStreamAnalyzerIOObject<
               XSEvent,
               Map<XSReader<XSEvent, AcceptingPetriNet>, Map<Long, Iterable<Iterable<Double>>>>,
               AcceptingPetriNet>(analyzer, context));
 }
  @Override
  public void doWork() throws OperatorException {
    Model model = modelInput.getData(Model.class);
    if (!(model instanceof KernelModel)) {
      throw new UserError(this, 122, "'support vector based model (kernel model)'");
    }

    PerformanceVector inputPerformance = performanceInput.getDataOrNull(PerformanceVector.class);

    PerformanceVector performance = count((KernelModel) model, inputPerformance);

    modelOutput.deliver(model);
    performanceOutput.deliver(performance);
  }
  @Override
  public void doWork() throws OperatorException {
    CentroidClusterModel model = modelInput.getData(CentroidClusterModel.class);

    Attributes trainAttributes = model.getTrainingHeader().getAttributes();
    String[] attributeNames = model.getAttributeNames();
    Attribute[] attributes = new Attribute[attributeNames.length + 1];
    for (int i = 0; i < attributeNames.length; i++) {
      Attribute originalAttribute = trainAttributes.get(attributeNames[i]);
      attributes[i] =
          AttributeFactory.createAttribute(attributeNames[i], originalAttribute.getValueType());
      if (originalAttribute.isNominal()) {
        attributes[i].setMapping((NominalMapping) originalAttribute.getMapping().clone());
      }
    }
    Attribute clusterAttribute = AttributeFactory.createAttribute("cluster", Ontology.NOMINAL);
    attributes[attributes.length - 1] = clusterAttribute;

    MemoryExampleTable table = new MemoryExampleTable(attributes);
    for (int i = 0; i < model.getNumberOfClusters(); i++) {
      double[] data = new double[attributeNames.length + 1];
      System.arraycopy(model.getCentroidCoordinates(i), 0, data, 0, attributeNames.length);
      data[attributeNames.length] = clusterAttribute.getMapping().mapString("cluster_" + i);
      table.addDataRow(new DoubleArrayDataRow(data));
    }

    ExampleSet resultSet = table.createExampleSet();
    resultSet.getAttributes().setSpecialAttribute(clusterAttribute, Attributes.CLUSTER_NAME);

    modelOutput.deliver(model);
    exampleSetOutput.deliver(resultSet);
  }
  /** Constructor */
  public RandomO(OperatorDescription description) {
    super(description);

    exampleSetInput.addPrecondition(
        new ExampleSetPrecondition(
            exampleSetInput, "user identification", Ontology.ATTRIBUTE_VALUE));
    exampleSetInput.addPrecondition(
        new ExampleSetPrecondition(
            exampleSetInput, "item identification", Ontology.ATTRIBUTE_VALUE));
    getTransformer()
        .addRule(
            new ExampleSetPassThroughRule(exampleSetInput, exampleSetOutput, SetRelation.EQUAL) {});

    getTransformer()
        .addRule(new GenerateNewMDRule(exampleSetOutput1, new MetaData(ItemRecommender.class)) {});
  }
 @Override
 public void transformMD() {
   for (InputPort inputPort : inputPorts) {
     MetaData metaData = inputPort.getMetaData();
     if (metaData != null) {
       if (metaData instanceof CollectionMetaData) {
         metaData = ((CollectionMetaData) metaData).getElementMetaDataRecursive();
       }
       metaData = metaData.clone();
       metaData.addToHistory(outputPort);
       outputPort.deliverMD(modifyMetaData(metaData));
       return;
     }
   }
   outputPort.deliverMD(null);
 }
  /** @see com.rapidminer.operator.OperatorChain#doWork() */
  @Override
  public void doWork() throws OperatorException {

    List<Operator> nested = this.getImmediateChildren();
    log.info("This StreamProcess has {} nested operators", nested.size());
    for (Operator op : nested) {
      log.info("  op: {}", op);

      if (op instanceof DataStreamOperator) {
        log.info("Resetting stream-operator {}", op);
        ((DataStreamOperator) op).reset();
      }
    }

    log.info("Starting some work in doWork()");
    ExampleSet exampleSet = input.getData(ExampleSet.class);
    log.info("input is an example set with {} examples", exampleSet.size());
    int i = 0;

    Iterator<Example> it = exampleSet.iterator();
    while (it.hasNext()) {
      Example example = it.next();
      log.info("Processing example {}", i);
      DataObject datum = StreamUtils.wrap(example);
      log.info("Wrapped data-object is: {}", datum);
      dataStream.deliver(datum);
      getSubprocess(0).execute();
      inApplyLoop();
      i++;
    }

    // super.doWork();
    log.info("doWork() is finished.");
  }
  @Override
  public final void doWork() throws OperatorException {
    ExampleSet inputExampleSet = exampleSetInput.getData(ExampleSet.class);
    ExampleSet applySet = null;
    // check for needed copy of original exampleset
    if (originalOutput.isConnected() && writesIntoExistingData()) {
      int type = DataRowFactory.TYPE_DOUBLE_ARRAY;
      if (inputExampleSet.getExampleTable() instanceof MemoryExampleTable) {
        DataRowReader dataRowReader = inputExampleSet.getExampleTable().getDataRowReader();
        if (dataRowReader.hasNext()) {
          type = dataRowReader.next().getType();
        }
      }
      // check if type is supported to be copied
      if (type >= 0) {
        applySet = MaterializeDataInMemory.materializeExampleSet(inputExampleSet, type);
      }
    }

    if (applySet == null) applySet = (ExampleSet) inputExampleSet.clone();

    // we apply on the materialized data, because writing can't take place in views anyway.
    ExampleSet result = apply(applySet);
    originalOutput.deliver(inputExampleSet);
    exampleSetOutput.deliver(result);
  }
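  // A hedged sketch (an assumption, not from the source): a subclass that modifies attribute
  // values in place would request the defensive copy made above by overriding this callback.
  @Override
  protected boolean writesIntoExistingData() {
    return true;
  }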
  /** Trains a model on the ExampleSet from the input port by delegating to {@link #learn(ExampleSet)}. */
  @Override
  public void doWork() throws OperatorException {
    ExampleSet exampleSet = exampleSetInput.getData(ExampleSet.class);

    // some checks
    if (exampleSet.getAttributes().getLabel() == null) {
      throw new UserError(this, 105, new Object[0]);
    }
    if (exampleSet.getAttributes().size() == 0) {
      throw new UserError(this, 106, new Object[0]);
    }

    // check capabilities and produce errors if they are not fulfilled
    CapabilityCheck check =
        new CapabilityCheck(
            this,
            Tools.booleanValue(
                ParameterService.getParameterValue(
                    CapabilityProvider.PROPERTY_RAPIDMINER_GENERAL_CAPABILITIES_WARN),
                true));
    check.checkLearnerCapabilities(this, exampleSet);

    Model model = learn(exampleSet);

    modelOutput.deliver(model);
    exampleSetOutput.deliver(exampleSet);
  }
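  // A minimal sketch (an assumption, not from the source) of the capability callback that the
  // CapabilityCheck above consults; a learner handling numerical attributes and nominal labels
  // might answer like this.
  @Override
  public boolean supportsCapability(OperatorCapability capability) {
    switch (capability) {
      case NUMERICAL_ATTRIBUTES:
      case BINOMINAL_LABEL:
      case POLYNOMINAL_LABEL:
        return true;
      default:
        return false;
    }
  }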
  private PerformanceVector getPerformance(ExampleSet exampleSet)
      throws OperatorException, MissingIOObjectException {
    innerExampleSetSource.deliver(exampleSet);

    getSubprocess(0).execute();

    return innerPerformanceSink.getData(PerformanceVector.class);
  }
 @SuppressWarnings("deprecation")
 private void autoWire(
     CompatibilityLevel level, InputPorts inputPorts, LinkedList<OutputPort> readyOutputs)
     throws PortException {
   boolean success = false;
   do {
     Set<InputPort> complete = new HashSet<InputPort>();
     for (InputPort in : inputPorts.getAllPorts()) {
       success = false;
       if (!in.isConnected()
           && !complete.contains(in)
           && in.getPorts().getOwner().getOperator().shouldAutoConnect(in)) {
         Iterator<OutputPort> outIterator;
         // TODO: Simon: Does the same in both cases. Check again.
         if (in.simulatesStack()) {
           outIterator = readyOutputs.descendingIterator();
         } else {
           outIterator = readyOutputs.descendingIterator();
         }
         while (outIterator.hasNext()) {
           OutputPort outCandidate = outIterator.next();
           // TODO: Remove shouldAutoConnect() in later versions
           Operator owner = outCandidate.getPorts().getOwner().getOperator();
           if (owner.shouldAutoConnect(outCandidate)) {
             if (outCandidate.getMetaData() != null) {
               if (in.isInputCompatible(outCandidate.getMetaData(), level)) {
                 readyOutputs.remove(outCandidate);
                 outCandidate.connectTo(in);
                 // we cannot continue with the remaining input ports
                 // since connecting may have triggered the creation of new input ports
                 // which would result in undefined behavior and a ConcurrentModificationException
                 success = true;
                 break;
               }
             }
           }
         }
          // mark this input port as handled, whether or not a matching output was found
          complete.add(in);
         if (success) {
           break;
         }
       }
     }
   } while (success);
 }
 @Override
 public MetaData getMetaData() {
   if (inPort != null) {
     return inPort.getMetaData();
   } else {
     return null;
   }
 }
 public ExampleSetMinus(OperatorDescription description) {
   super(description);
   subtrahendInput.addPrecondition(
       new ExampleSetPrecondition(subtrahendInput, Ontology.ATTRIBUTE_VALUE, Attributes.ID_NAME));
   getExampleSetInputPort()
       .addPrecondition(
           new ExampleSetPrecondition(
               getExampleSetInputPort(), Ontology.ATTRIBUTE_VALUE, Attributes.ID_NAME));
 }
  @Override
  public ExampleSet apply(ExampleSet exampleSet) throws OperatorException {
    // recall: difference = minuend - subtrahend
    // but the subtrahend is last on the ioobjects stack, so pop first
    ExampleSet subtrahendSet = subtrahendInput.getData(ExampleSet.class);
    ExampleSet minuendSet = exampleSet;

    subtrahendSet.remapIds();
    minuendSet.remapIds();

    Attribute minuendId = minuendSet.getAttributes().getId();
    Attribute subtrahendId = subtrahendSet.getAttributes().getId();

    // sanity checks
    if ((minuendId == null) || (subtrahendId == null)) {
      throw new UserError(this, 129);
    }
    if (minuendId.getValueType() != subtrahendId.getValueType()) {
      throw new UserError(
          this,
          120,
          new Object[] {
            subtrahendId.getName(),
            Ontology.VALUE_TYPE_NAMES[subtrahendId.getValueType()],
            Ontology.VALUE_TYPE_NAMES[minuendId.getValueType()]
          });
    }

    List<Integer> indices = new LinkedList<>();
    {
      int i = 0;
      for (Example example : minuendSet) {
        double id = example.getValue(minuendId);
        Example subtrahendExample = null;
        if (minuendId.isNominal()) {
          subtrahendExample =
              subtrahendSet.getExampleFromId(
                  subtrahendId.getMapping().getIndex(minuendId.getMapping().mapIndex((int) id)));
        } else {
          subtrahendExample = subtrahendSet.getExampleFromId(id);
        }
        if (subtrahendExample == null) {
          indices.add(i);
        }
        i++;
      }
    }

    int[] indexArray = new int[indices.size()];
    for (int i = 0; i < indices.size(); i++) {
      indexArray[i] = indices.get(i);
    }

    ExampleSet minusSet = new MappedExampleSet(minuendSet, indexArray);
    return minusSet;
  }
  @Override
  public void doWork() throws OperatorException {

    ExampleSet exampleSet = exampleSetInput.getData();

    IEntityMapping user_mapping = new EntityMapping();
    IEntityMapping item_mapping = new EntityMapping();
    IRatings training_data = new Ratings();

    if (exampleSet.getAttributes().getSpecial("user identification") == null) {
      throw new UserError(this, 105);
    }

    if (exampleSet.getAttributes().getSpecial("item identification") == null) {
      throw new UserError(this, 105);
    }

    if (exampleSet.getAttributes().getLabel() == null) {
      throw new UserError(this, 105);
    }

    Attributes attributes = exampleSet.getAttributes();
    Attribute userAttribute = attributes.getRole("user identification").getAttribute();
    Attribute itemAttribute = attributes.getRole("item identification").getAttribute();
    Attribute labelAttribute = attributes.getLabel();

    for (Example example : exampleSet) {

      int uid = user_mapping.ToInternalID((int) example.getValue(userAttribute));
      int iid = item_mapping.ToInternalID((int) example.getValue(itemAttribute));

      double r = example.getValue(labelAttribute);
      training_data.Add(uid, iid, r);
    }

    _slopeOne recommendAlg = new _slopeOne();

    recommendAlg.user_mapping = user_mapping;
    recommendAlg.item_mapping = item_mapping;
    recommendAlg.SetMinRating(getParameterAsInt("Min Rating"));
    recommendAlg.SetMaxRating(recommendAlg.GetMinRating() + getParameterAsInt("Range"));

    recommendAlg.SetRatings(training_data);

    recommendAlg.Train();

    exampleSetOutput.deliver(exampleSet);

    exampleSetOutput1.deliver(recommendAlg);
  }
  public CostEvaluator(OperatorDescription description) {
    super(description);

    exampleSetInput.addPrecondition(
        new ExampleSetPrecondition(
            exampleSetInput, Ontology.ATTRIBUTE_VALUE, Attributes.LABEL_NAME));
    exampleSetInput.addPrecondition(
        new ExampleSetPrecondition(
            exampleSetInput, Ontology.ATTRIBUTE_VALUE, Attributes.PREDICTION_NAME));
    getTransformer().addGenerationRule(performanceOutput, PerformanceVector.class);
    getTransformer().addPassThroughRule(exampleSetInput, exampleSetOutput);

    addValue(
        new ValueDouble("costs", "The last costs.") {
          @Override
          public double getDoubleValue() {
            return lastCosts;
          }
        });
  }
 /**
  * Sorts the operators topologically, i.e. such that operator <var>i</var> in the returned
  * ordering receives its input (via connected {@link InputPort}s) only from operators
  * <var>0..i-1</var>.
  */
 public Vector<Operator> topologicalSort() {
   final Map<Operator, Integer> originalIndices = new HashMap<Operator, Integer>();
   for (int i = 0; i < operators.size(); i++) {
     originalIndices.put(operators.get(i), i);
   }
   EdgeCounter counter = new EdgeCounter(operators);
   for (Operator child : getOperators()) {
     for (OutputPort out : child.getOutputPorts().getAllPorts()) {
       InputPort dest = out.getDestination();
       if (dest != null) {
         counter.incNumEdges(dest.getPorts().getOwner().getOperator());
       }
     }
   }
   Vector<Operator> sorted = new Vector<Operator>();
   PriorityQueue<Operator> independentOperators =
       new PriorityQueue<Operator>(
           Math.max(1, operators.size()),
           new Comparator<Operator>() {
             @Override
             public int compare(Operator o1, Operator o2) {
               return originalIndices.get(o1) - originalIndices.get(o2);
             }
           });
   independentOperators.addAll(counter.getIndependentOperators());
   while (!independentOperators.isEmpty()) {
     Operator first = independentOperators.poll();
     sorted.add(first);
     for (OutputPort out : first.getOutputPorts().getAllPorts()) {
       InputPort dest = out.getDestination();
       if (dest != null) {
         Operator destOp = dest.getPorts().getOwner().getOperator();
         if (counter.decNumEdges(destOp) == 0) {
           // independentOperators.addFirst(destOp);
           independentOperators.add(destOp);
         }
       }
     }
   }
   return sorted;
 }
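 // EdgeCounter is used above but not shown in this excerpt. A minimal sketch of what such a helper
 // could look like (an assumption, not the actual implementation): it counts incoming connections
 // per registered operator and hands out the operators whose count is, or drops to, zero.
 // Requires: java.util.Collection, java.util.LinkedHashMap, java.util.LinkedList, java.util.List,
 // java.util.Map
 private static class EdgeCounter {
   private final Map<Operator, Integer> incomingEdges = new LinkedHashMap<Operator, Integer>();

   EdgeCounter(Collection<Operator> operators) {
     for (Operator op : operators) {
       incomingEdges.put(op, 0);
     }
   }

   void incNumEdges(Operator op) {
     Integer count = incomingEdges.get(op);
     if (count != null) {
       incomingEdges.put(op, count + 1);
     }
   }

   /** Decrements the edge count and returns the new value, or -1 for unregistered operators. */
   int decNumEdges(Operator op) {
     Integer count = incomingEdges.get(op);
     if (count == null) {
       // e.g. the destination is an inner sink owned by the enclosing chain
       return -1;
     }
     int newCount = count - 1;
     incomingEdges.put(op, newCount);
     return newCount;
   }

   /** Returns the operators without any incoming connections. */
   Collection<Operator> getIndependentOperators() {
     List<Operator> independent = new LinkedList<Operator>();
     for (Map.Entry<Operator, Integer> entry : incomingEdges.entrySet()) {
       if (entry.getValue() == 0) {
         independent.add(entry.getKey());
       }
     }
     return independent;
   }
 }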
  @Override
  public void doWork() throws OperatorException {

    // retrieve document
    Document input = documentInput.getData(Document.class);

    // encrypt/decrypt text
    String text = transformText(configureEncryptor(), getText(input));

    // deliver transformed document
    documentOut.deliver(new Document(text));
  }
  public ValueSubgroupIteration(OperatorDescription description) {
    super(description, "Subset Processing");

    exampleSetInput.addPrecondition(
        new AttributeSetPrecondition(
            exampleSetInput,
            new AttributeNameProvider() {
              @Override
              public String[] getRequiredAttributeNames() {
                try {
                  List<String[]> attributeValueOptions;
                  attributeValueOptions = getParameterList(PARAMETER_ATTRIBUTES);
                  String[] groupAttributes = new String[attributeValueOptions.size()];
                  int i = 0;
                  for (String[] pair : attributeValueOptions) {
                    groupAttributes[i] = pair[0];
                    i++;
                  }
                  return groupAttributes;
                } catch (UndefinedParameterError e) {
                  return new String[0];
                }
              }
            }));

    getTransformer()
        .addRule(
            new ExampleSetPassThroughRule(
                exampleSetInput, innerExampleSetSource, SetRelation.SUPERSET) {
              @Override
              public ExampleSetMetaData modifyExampleSet(ExampleSetMetaData emd)
                  throws UndefinedParameterError {
                emd.getNumberOfExamples().reduceByUnknownAmount();
                if (getParameterAsBoolean(PARAMETER_FILTER_ATTRIBUTE)) {
                  List<String[]> attributeValueOptions;
                  attributeValueOptions = getParameterList(PARAMETER_ATTRIBUTES);
                  String[] groupAttributes = new String[attributeValueOptions.size()];
                  int i = 0;
                  for (String[] pair : attributeValueOptions) {
                    groupAttributes[i] = pair[0];
                    i++;
                  }

                  for (String name : groupAttributes) {
                    AttributeMetaData amd = emd.getAttributeByName(name);
                    if (amd != null) emd.removeAttribute(emd.getAttributeByName(name));
                  }
                }
                return emd;
              }
            });
    getTransformer().addRule(new SubprocessTransformRule(getSubprocess(0)));
  }
  public ThresholdFinder(OperatorDescription description) {
    super(description);

    exampleSetInput.addPrecondition(
        new ExampleSetPrecondition(
            exampleSetInput,
            Ontology.VALUE_TYPE,
            Attributes.LABEL_NAME,
            Attributes.PREDICTION_NAME,
            Attributes.CONFIDENCE_NAME));
    getTransformer().addPassThroughRule(exampleSetInput, exampleSetOutput);
    getTransformer().addGenerationRule(thresholdOutput, Threshold.class);
  }
  @Override
  public void doWork() throws OperatorException {
    ExampleSet exampleSet = exampleSetInput.getData(ExampleSet.class);

    // only use numeric attributes
    Tools.onlyNumericalAttributes(exampleSet, "KernelPCA");
    Tools.onlyNonMissingValues(exampleSet, getOperatorClassName(), this);

    Attributes attributes = exampleSet.getAttributes();
    int numberOfExamples = exampleSet.size();

    // calculating means for later zero centering
    exampleSet.recalculateAllAttributeStatistics();
    double[] means = new double[exampleSet.getAttributes().size()];
    int i = 0;
    for (Attribute attribute : exampleSet.getAttributes()) {
      means[i] = exampleSet.getStatistics(attribute, Statistics.AVERAGE);
      i++;
    }

    // kernel
    Kernel kernel = Kernel.createKernel(this);

    // copying zero centered exampleValues
    ArrayList<double[]> exampleValues = new ArrayList<double[]>(numberOfExamples);
    i = 0;
    for (Example columnExample : exampleSet) {
      double[] columnValues = getAttributeValues(columnExample, attributes, means);
      exampleValues.add(columnValues);
      i++;
    }

    // filling kernel matrix
    Matrix kernelMatrix = new Matrix(numberOfExamples, numberOfExamples);
    for (i = 0; i < numberOfExamples; i++) {
      for (int j = 0; j < numberOfExamples; j++) {
        kernelMatrix.set(
            i, j, kernel.calculateDistance(exampleValues.get(i), exampleValues.get(j)));
      }
    }

    // calculating eigenVectors
    EigenvalueDecomposition eig = kernelMatrix.eig();
    Model model = new KernelPCAModel(exampleSet, means, eig.getV(), exampleValues, kernel);

    if (exampleSetOutput.isConnected()) {
      exampleSetOutput.deliver(model.apply(exampleSet));
    }
    originalOutput.deliver(exampleSet);
    modelOutput.deliver(model);
  }
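  // getAttributeValues(...) is called above but not shown in this excerpt. A minimal sketch of what
  // it might do (an assumption, not the actual implementation): collect the example's values,
  // zero-centered with the means computed earlier.
  private double[] getAttributeValues(Example example, Attributes attributes, double[] means) {
    double[] values = new double[attributes.size()];
    int index = 0;
    for (Attribute attribute : attributes) {
      values[index] = example.getValue(attribute) - means[index];
      index++;
    }
    return values;
  }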
  @Override
  public void apply() {
    try {
      Operator oldOperator = inputPort.getPorts().getOwner().getOperator();
      Operator newOperator =
          NewOperatorDialog.selectMatchingOperator(
              RapidMinerGUI.getMainFrame().getActions(), null, neededClass, null, null);

      if (newOperator != null) {
        ExecutionUnit unit = inputPort.getPorts().getOwner().getConnectionContext();
        int index = unit.getIndexOfOperator(oldOperator);
        if (index == -1) {
          unit.addOperator(newOperator);
        } else {
          unit.addOperator(newOperator, unit.getIndexOfOperator(oldOperator));
        }
        if (RapidMinerGUI.getMainFrame().VALIDATE_AUTOMATICALLY_ACTION.isSelected()) {
          unit.autoWireSingle(newOperator, CompatibilityLevel.VERSION_5, true, true);
        }
      }
    } catch (OperatorCreationException e) {
      // the requested operator could not be created; leave the process unchanged
    }
  }
  @Override
  public void doWork() throws OperatorException {
    ExampleSet exampleSet = exampleSetInput.getData(ExampleSet.class);
    // needed for some measures
    Tools.checkAndCreateIds(exampleSet);

    DistanceMeasure measure = measureHelper.getInitializedMeasure(exampleSet);
    SimilarityMeasureObject measureObject = new SimilarityMeasureObject(measure, exampleSet);

    ObjectVisualizerService.addObjectVisualizer(measureObject, new ExampleVisualizer(exampleSet));

    similarityOutput.deliver(measureObject);
    exampleSetOutput.deliver(exampleSet);
  }