private static ProtobufMessages.NodeMemory writeRIANodeMemory(
      final int nodeId, final Memory memory) {
    // for RIA nodes, we need to store the ID of the created handles
    RIAMemory mem = (RIAMemory) memory;
    if (!mem.memory.isEmpty()) {
      ProtobufMessages.NodeMemory.RIANodeMemory.Builder _ria =
          ProtobufMessages.NodeMemory.RIANodeMemory.newBuilder();

      final org.drools.core.util.Iterator it = mem.memory.iterator();
      // iterate over all propagated handles and serialize each tuple together with the handle created for it
      for (ObjectEntry entry = (ObjectEntry) it.next();
          entry != null;
          entry = (ObjectEntry) it.next()) {
        LeftTuple leftTuple = (LeftTuple) entry.getKey();
        InternalFactHandle handle = (InternalFactHandle) entry.getValue();
        FactHandle _handle =
            ProtobufMessages.FactHandle.newBuilder()
                .setId(handle.getId())
                .setRecency(handle.getRecency())
                .build();
        _ria.addContext(
            ProtobufMessages.NodeMemory.RIANodeMemory.RIAContext.newBuilder()
                .setTuple(PersisterHelper.createTuple(leftTuple))
                .setResultHandle(_handle)
                .build());
      }

      return ProtobufMessages.NodeMemory.newBuilder()
          .setNodeId(nodeId)
          .setNodeType(ProtobufMessages.NodeMemory.NodeType.RIA)
          .setRia(_ria.build())
          .build();
    }
    return null;
  }
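A hedged sketch of the read-back side of the message built above, assuming only the standard protobuf-generated accessors that mirror the builder calls shown (getRia(), getContextList(), getTuple(), getResultHandle()); the actual unmarshaller and the tuple lookup live elsewhere and are omitted here.
  // Sketch only: walk the serialized RIA contexts and recover each created handle's ID.
  private static void readRIANodeMemory(final ProtobufMessages.NodeMemory _node) {
    final ProtobufMessages.NodeMemory.RIANodeMemory _ria = _node.getRia();
    for (ProtobufMessages.NodeMemory.RIANodeMemory.RIAContext _ctx : _ria.getContextList()) {
      // _ctx.getTuple() identifies the propagated left tuple; _ctx.getResultHandle() carries
      // the ID and recency of the handle that was created for it.
      final ProtobufMessages.FactHandle _handle = _ctx.getResultHandle();
      // ... locate the live tuple and re-associate the restored handle with it (omitted) ...
    }
  }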
  public BaseNode getMatchingNode(BaseNode candidate) {
    if (this.otherSinks != null) {
      for (ObjectSinkNode sink = this.otherSinks.getFirst();
          sink != null;
          sink = sink.getNextObjectSinkNode()) {
        if (sink.thisNodeEquals(candidate)) {
          return (BaseNode) sink;
        }
      }
    }

    if (this.hashableSinks != null) {
      for (ObjectSinkNode sink = this.hashableSinks.getFirst();
          sink != null;
          sink = sink.getNextObjectSinkNode()) {
        if (sink.thisNodeEquals(candidate)) {
          return (BaseNode) sink;
        }
      }
    }

    if (this.hashedSinkMap != null) {
      final Iterator it = this.hashedSinkMap.newIterator();
      for (ObjectEntry entry = (ObjectEntry) it.next();
          entry != null;
          entry = (ObjectEntry) it.next()) {
        final ObjectSink sink = (ObjectSink) entry.getValue();
        if (sink.thisNodeEquals(candidate)) {
          return (BaseNode) sink;
        }
      }
    }
    return null;
  }
  protected void doRemove(
      final RuleRemovalContext context,
      final ReteooBuilder builder,
      final BaseNode node,
      final InternalWorkingMemory[] workingMemories) {
    if (!node.isInUse()) {
      removeObjectSink((ObjectSink) node);
    }

    if (!this.isInUse()) {
      for (InternalWorkingMemory workingMemory : workingMemories) {
        ObjectHashMap memory = (ObjectHashMap) workingMemory.getNodeMemory(this);

        Iterator it = memory.iterator();
        for (ObjectEntry entry = (ObjectEntry) it.next();
            entry != null;
            entry = (ObjectEntry) it.next()) {
          LeftTuple leftTuple = (LeftTuple) entry.getKey();
          leftTuple.unlinkFromLeftParent();
          leftTuple.unlinkFromRightParent();
        }
        workingMemory.clearNodeMemory(this);
      }
    }
    this.tupleSource.remove(context, builder, this, workingMemories);
  }
Example #4
  public void updateSink(
      final ObjectSink sink,
      final PropagationContext context,
      final InternalWorkingMemory workingMemory) {
    // @todo
    // JBRULES-612: the cache MUST be invalidated when a new node type is added to the network, so
    // iterate and reset all caches.
    final ObjectTypeNode node = (ObjectTypeNode) sink;

    final ObjectType newObjectType = node.getObjectType();

    InternalWorkingMemoryEntryPoint wmEntryPoint =
        (InternalWorkingMemoryEntryPoint)
            workingMemory.getWorkingMemoryEntryPoint(this.entryPoint.getEntryPointId());

    for (ObjectTypeConf objectTypeConf :
        wmEntryPoint.getObjectTypeConfigurationRegistry().values()) {
      if (newObjectType.isAssignableFrom(
          objectTypeConf.getConcreteObjectTypeNode().getObjectType())) {
        objectTypeConf.resetCache();
        ObjectTypeNode sourceNode = objectTypeConf.getConcreteObjectTypeNode();
        Iterator it =
            ((ObjectTypeNodeMemory) workingMemory.getNodeMemory(sourceNode)).memory.iterator();
        for (ObjectEntry entry = (ObjectEntry) it.next();
            entry != null;
            entry = (ObjectEntry) it.next()) {
          sink.assertObject((InternalFactHandle) entry.getValue(), context, workingMemory);
        }
      }
    }
  }
Example #5
 /**
  * OTN needs to override remove to avoid releasing the node ID, since OTNs are never removed
  * from the rulebase in the current implementation.
  */
 protected void doRemove(
     final RuleRemovalContext context,
     final ReteooBuilder builder,
     final BaseNode node,
     final InternalWorkingMemory[] workingMemories) {
   if (context.getCleanupAdapter() != null) {
     for (InternalWorkingMemory workingMemory : workingMemories) {
       CleanupAdapter adapter = context.getCleanupAdapter();
       final ObjectHashSet memory = (ObjectHashSet) workingMemory.getNodeMemory(this);
       Iterator it = memory.iterator();
       for (ObjectEntry entry = (ObjectEntry) it.next();
           entry != null;
           entry = (ObjectEntry) it.next()) {
         InternalFactHandle handle = (InternalFactHandle) entry.getValue();
         for (LeftTuple leftTuple = handle.getFirstLeftTuple();
             leftTuple != null;
             leftTuple = leftTuple.getLeftParentNext()) {
           adapter.cleanUp(leftTuple, workingMemory);
         }
       }
     }
     context.setCleanupAdapter(null);
   }
   if (!node.isInUse()) {
     removeObjectSink((ObjectSink) node);
   }
 }
Example #6
  /**
   * TMS will be automatically enabled when the first logical insert happens.
   *
   * <p>We will take all the already asserted objects of the same type and initialize the equality
   * map.
   *
   * @param object the logically inserted object.
   * @param conf the type's configuration.
   */
  private void enableTMS(Object object, ObjectTypeConf conf) {

    final Rete source = this.ruleBase.getRete();
    final ClassObjectType cot = new ClassObjectType(object.getClass());
    final Map<ObjectType, ObjectTypeNode> map = source.getObjectTypeNodes(EntryPoint.DEFAULT);
    final ObjectTypeNode node = map.get(cot);
    final ObjectHashSet memory = ((ObjectTypeNodeMemory) this.wm.getNodeMemory(node)).memory;

    // All objects of this type that are already there were certainly stated,
    // since this method call happens at the first logical insert, for any given type.
    org.drools.core.util.Iterator it = memory.iterator();

    for (Object obj = it.next(); obj != null; obj = it.next()) {

      org.drools.core.util.ObjectHashSet.ObjectEntry holder =
          (org.drools.core.util.ObjectHashSet.ObjectEntry) obj;

      InternalFactHandle handle = (InternalFactHandle) holder.getValue();

      if (handle != null) {
        EqualityKey key = createEqualityKey(handle);
        key.setStatus(EqualityKey.STATED);
        this.wm.getTruthMaintenanceSystem().put(key);
      }
    }

    // Enable TMS for this type.
    conf.enableTMS();
  }
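Conceptually, the loop above seeds the TMS with one equality key per pre-existing handle and marks it STATED, since anything asserted before the first logical insert must have been a plain (stated) insertion. A minimal, self-contained sketch of that idea using toy types, not the Drools TMS classes:
// Toy illustration: an equality map that records how each distinct fact entered working memory.
final class ToyEqualityMap {
  static final String STATED = "STATED";
  static final String JUSTIFIED = "JUSTIFIED";

  private final java.util.Map<Object, String> status = new java.util.HashMap<Object, String>();

  // Facts that existed before TMS was enabled were necessarily stated.
  void seedAsStated(final Iterable<?> existingFacts) {
    for (Object fact : existingFacts) {
      status.put(fact, STATED);
    }
  }

  String statusOf(final Object fact) {
    return status.get(fact);
  }
}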
Example #7
  /**
   * When L&R Unlinking is enabled, updateSink() is used to populate a node's memory, but it has
   * to take into account whether the node is currently propagating.
   */
  private void updateLRUnlinking(
      final ObjectSink sink,
      final PropagationContext context,
      final InternalWorkingMemory workingMemory) {

    final ObjectHashSet memory = (ObjectHashSet) workingMemory.getNodeMemory(this);

    Iterator it = memory.iterator();

    InternalFactHandle ctxHandle = (InternalFactHandle) context.getFactHandle();

    if (!context.isPropagating(this)
        || (context.isPropagating(this) && context.shouldPropagateAll())) {

      for (ObjectEntry entry = (ObjectEntry) it.next();
          entry != null;
          entry = (ObjectEntry) it.next()) {
        // Assert everything
        sink.assertObject((InternalFactHandle) entry.getValue(), context, workingMemory);
      }

    } else {

      for (ObjectEntry entry = (ObjectEntry) it.next();
          entry != null;
          entry = (ObjectEntry) it.next()) {
        InternalFactHandle handle = (InternalFactHandle) entry.getValue();
        // Exclude the current fact propagation
        if (handle.getId() != ctxHandle.getId()) {
          sink.assertObject(handle, context, workingMemory);
        }
      }
    }
  }
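The branch condition above, !isPropagating || (isPropagating && shouldPropagateAll), collapses to a single predicate; a hypothetical helper on the same node class, shown only to make the intent explicit:
  // Equivalent to the condition above: assert everything unless this node is the one currently
  // propagating and only a partial propagation was requested.
  private boolean assertsEverything(final PropagationContext context) {
    return !context.isPropagating(this) || context.shouldPropagateAll();
  }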
  public void updateSink(
      final ObjectSink sink,
      final PropagationContext context,
      final InternalWorkingMemory workingMemory) {
    BetaNode betaNode = (BetaNode) this.sink.getSinks()[0];

    Memory betaMemory = workingMemory.getNodeMemory(betaNode);
    BetaMemory bm;
    if (betaNode.getType() == NodeTypeEnums.AccumulateNode) {
      bm = ((AccumulateMemory) betaMemory).getBetaMemory();
    } else {
      bm = (BetaMemory) betaMemory;
    }

    // for RIA nodes, each propagated left tuple has an associated created handle that must be
    // asserted into the new sink
    bm.getRightTupleMemory().iterator();
    if (bm.getRightTupleMemory().size() > 0) {
      final org.drools.core.util.Iterator it = bm.getRightTupleMemory().iterator();
      for (RightTuple entry = (RightTuple) it.next();
          entry != null;
          entry = (RightTuple) it.next()) {
        LeftTuple leftTuple = (LeftTuple) entry.getFactHandle().getObject();
        InternalFactHandle handle = (InternalFactHandle) leftTuple.getObject();
        sink.assertObject(handle, context, workingMemory);
      }
    }
  }
Example #9
 public void dispose() {
   if (dynamicFacts != null) {
     // first, handle facts that were inserted into the working memory using the old API,
     // which sets a per-instance dynamic flag, and remove this session from the listener
     // list in the bean
     for (InternalFactHandle handle : dynamicFacts) {
       removePropertyChangeListener(handle, false);
     }
     dynamicFacts = null;
   }
   for (ObjectTypeConf conf : this.typeConfReg.values()) {
     // then, we check if any of the object types were configured using the
     // @propertyChangeSupport annotation, and clean them up
     if (conf.isDynamic() && conf.isSupportsPropertyChangeListeners()) {
       // it is enough to iterate the facts on the concrete object type nodes
       // only, as the facts will always be in their concrete object type nodes
       // even if they were also asserted into higher-level OTNs
       ObjectTypeNode otn = conf.getConcreteObjectTypeNode();
       final ObjectHashSet memory =
           ((ObjectTypeNodeMemory) this.getInternalWorkingMemory().getNodeMemory(otn)).memory;
       Iterator it = memory.iterator();
       for (ObjectEntry entry = (ObjectEntry) it.next();
           entry != null;
           entry = (ObjectEntry) it.next()) {
         InternalFactHandle handle = (InternalFactHandle) entry.getValue();
         removePropertyChangeListener(handle, false);
       }
     }
   }
 }
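The cleanup in dispose() assumes that facts supporting property-change notification expose the standard JavaBeans listener methods; removePropertyChangeListener(handle, false) ultimately unsubscribes the session from beans shaped like the following sketch (illustrative, not a Drools type):
// Standard java.beans pattern: the session registers itself as a listener when the fact is
// inserted and must be removed again on dispose() so the bean does not keep the session alive.
public class CounterFact {
  private final java.beans.PropertyChangeSupport pcs = new java.beans.PropertyChangeSupport(this);
  private int value;

  public void addPropertyChangeListener(final java.beans.PropertyChangeListener l) {
    pcs.addPropertyChangeListener(l);
  }

  public void removePropertyChangeListener(final java.beans.PropertyChangeListener l) {
    pcs.removePropertyChangeListener(l);
  }

  public void setValue(final int value) {
    final int old = this.value;
    this.value = value;
    pcs.firePropertyChange("value", old, value); // notifies registered listeners (the session)
  }
}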
  private static ProtobufMessages.NodeMemory writeRIANodeMemory(
      final int nodeId,
      final MarshallerWriteContext context,
      final BaseNode node,
      final NodeMemories memories,
      final Memory memory) {
    RightInputAdapterNode riaNode = (RightInputAdapterNode) node;

    ObjectSink[] sinks = riaNode.getSinkPropagator().getSinks();
    BetaNode betaNode = (BetaNode) sinks[0];

    Memory betaMemory = memories.peekNodeMemory(betaNode.getId());
    if (betaMemory == null) {
      return null;
    }
    BetaMemory bm;
    if (betaNode.getType() == NodeTypeEnums.AccumulateNode) {
      bm = ((AccumulateMemory) betaMemory).getBetaMemory();
    } else {
      bm = (BetaMemory) betaMemory;
    }

    // for RIA nodes, we need to store the ID of the created handles
    bm.getRightTupleMemory().iterator();
    if (bm.getRightTupleMemory().size() > 0) {
      ProtobufMessages.NodeMemory.RIANodeMemory.Builder _ria =
          ProtobufMessages.NodeMemory.RIANodeMemory.newBuilder();
      final org.drools.core.util.Iterator it = bm.getRightTupleMemory().iterator();

      // iterate over all propagated handles and serialize each tuple together with the handle created for it
      for (RightTuple entry = (RightTuple) it.next();
          entry != null;
          entry = (RightTuple) it.next()) {
        LeftTuple leftTuple = (LeftTuple) entry.getFactHandle().getObject();
        InternalFactHandle handle = (InternalFactHandle) leftTuple.getObject();
        FactHandle _handle =
            ProtobufMessages.FactHandle.newBuilder()
                .setId(handle.getId())
                .setRecency(handle.getRecency())
                .build();
        _ria.addContext(
            ProtobufMessages.NodeMemory.RIANodeMemory.RIAContext.newBuilder()
                .setTuple(PersisterHelper.createTuple(leftTuple))
                .setResultHandle(_handle)
                .build());
      }

      return ProtobufMessages.NodeMemory.newBuilder()
          .setNodeId(nodeId)
          .setNodeType(ProtobufMessages.NodeMemory.NodeType.RIA)
          .setRia(_ria.build())
          .build();
    }
    return null;
  }
  public static void writeTruthMaintenanceSystem(
      MarshallerWriteContext context, EntryPoint wmep, ProtobufMessages.EntryPoint.Builder _epb)
      throws IOException {
    TruthMaintenanceSystem tms = ((NamedEntryPoint) wmep).getTruthMaintenanceSystem();
    ObjectHashMap justifiedMap = tms.getEqualityKeyMap();

    if (!justifiedMap.isEmpty()) {
      EqualityKey[] keys = new EqualityKey[justifiedMap.size()];
      org.drools.core.util.Iterator it = justifiedMap.iterator();
      int i = 0;
      for (org.drools.core.util.ObjectHashMap.ObjectEntry entry =
              (org.drools.core.util.ObjectHashMap.ObjectEntry) it.next();
          entry != null;
          entry = (org.drools.core.util.ObjectHashMap.ObjectEntry) it.next()) {
        EqualityKey key = (EqualityKey) entry.getKey();
        keys[i++] = key;
      }

      Arrays.sort(keys, EqualityKeySorter.instance);

      ProtobufMessages.TruthMaintenanceSystem.Builder _tms =
          ProtobufMessages.TruthMaintenanceSystem.newBuilder();

      // write the assert map of Equality keys
      for (EqualityKey key : keys) {
        ProtobufMessages.EqualityKey.Builder _key = ProtobufMessages.EqualityKey.newBuilder();
        _key.setStatus(key.getStatus());
        _key.setHandleId(key.getFactHandle().getId());

        if (key.size() > 1) {
          // add all the other handles in the key, if any exist
          FastIterator keyIter = key.fastIterator();
          for (DefaultFactHandle handle = key.getFirst().getNext();
              handle != null;
              handle = (DefaultFactHandle) keyIter.next(handle)) {
            _key.addOtherHandle(handle.getId());
          }
        }

        if (key.getBeliefSet() != null) {
          writeBeliefSet(context, key.getBeliefSet(), _key);
        }

        _tms.addKey(_key.build());
      }

      _epb.setTms(_tms.build());
    }
  }
  public void updateSink(
      final ObjectSink sink,
      final PropagationContext context,
      final InternalWorkingMemory workingMemory) {

    final ObjectHashMap memory = (ObjectHashMap) workingMemory.getNodeMemory(this);

    final Iterator it = memory.iterator();

    // iterate over all propagated handles and assert them to the new sink
    for (ObjectEntry entry = (ObjectEntry) it.next();
        entry != null;
        entry = (ObjectEntry) it.next()) {
      sink.assertObject((InternalFactHandle) entry.getValue(), context, workingMemory);
    }
  }
  public ObjectSink[] getSinks() {
    if (this.sinks != null) {
      return sinks;
    }
    ObjectSink[] sinks = new ObjectSink[size()];
    int at = 0;

    if (this.hashedFieldIndexes != null) {
      // Iterate the FieldIndexes to see if any are hashed
      for (FieldIndex fieldIndex = this.hashedFieldIndexes.getFirst();
          fieldIndex != null;
          fieldIndex = fieldIndex.getNext()) {
        if (!fieldIndex.isHashed()) {
          continue;
        }
        // this field is hashed, so collect every sink registered in the hashedSinkMap under this field index
        final int index = fieldIndex.getIndex();
        final Iterator it = this.hashedSinkMap.newIterator();
        for (ObjectEntry entry = (ObjectEntry) it.next();
            entry != null;
            entry = (ObjectEntry) it.next()) {
          HashKey hashKey = (HashKey) entry.getKey();
          if (hashKey.getIndex() == index) {
            sinks[at++] = (ObjectSink) entry.getValue();
          }
        }
      }
    }

    if (this.hashableSinks != null) {
      for (ObjectSinkNode sink = this.hashableSinks.getFirst();
          sink != null;
          sink = sink.getNextObjectSinkNode()) {
        sinks[at++] = sink;
      }
    }

    if (this.otherSinks != null) {
      for (ObjectSinkNode sink = this.otherSinks.getFirst();
          sink != null;
          sink = sink.getNextObjectSinkNode()) {
        sinks[at++] = sink;
      }
    }
    this.sinks = sinks;
    return sinks;
  }
  private static ProtobufMessages.NodeMemory writeQueryElementNodeMemory(
      final int nodeId, final Memory memory, final InternalWorkingMemory wm) {
    org.drools.core.util.Iterator<LeftTuple> it =
        LeftTupleIterator.iterator(wm, ((QueryElementNodeMemory) memory).getNode());

    ProtobufMessages.NodeMemory.QueryElementNodeMemory.Builder _query =
        ProtobufMessages.NodeMemory.QueryElementNodeMemory.newBuilder();
    for (LeftTuple leftTuple = it.next(); leftTuple != null; leftTuple = it.next()) {
      InternalFactHandle handle = (InternalFactHandle) leftTuple.getObject();
      FactHandle _handle =
          ProtobufMessages.FactHandle.newBuilder()
              .setId(handle.getId())
              .setRecency(handle.getRecency())
              .build();

      ProtobufMessages.NodeMemory.QueryElementNodeMemory.QueryContext.Builder _context =
          ProtobufMessages.NodeMemory.QueryElementNodeMemory.QueryContext.newBuilder()
              .setTuple(PersisterHelper.createTuple(leftTuple))
              .setHandle(_handle);

      LeftTuple childLeftTuple = leftTuple.getFirstChild();
      while (childLeftTuple != null) {
        RightTuple rightParent = childLeftTuple.getRightParent();
        _context.addResult(
            ProtobufMessages.FactHandle.newBuilder()
                .setId(rightParent.getFactHandle().getId())
                .setRecency(rightParent.getFactHandle().getRecency())
                .build());
        while (childLeftTuple != null && childLeftTuple.getRightParent() == rightParent) {
          // skip to the next child that has a different right parent
          childLeftTuple = childLeftTuple.getLeftParentNext();
        }
      }
      _query.addContext(_context.build());
    }

    return _query.getContextCount() > 0
        ? ProtobufMessages.NodeMemory.newBuilder()
            .setNodeId(nodeId)
            .setNodeType(ProtobufMessages.NodeMemory.NodeType.QUERY_ELEMENT)
            .setQueryElement(_query.build())
            .build()
        : null;
  }
Example #15
  public void updateSink(
      final ObjectSink sink,
      final PropagationContext context,
      final InternalWorkingMemory workingMemory) {
    if (lrUnlinkingEnabled) {
      // Update sink taking into account L&R unlinking peculiarities
      updateLRUnlinking(sink, context, workingMemory);

    } else {
      // Regular updateSink
      final ObjectHashSet memory = (ObjectHashSet) workingMemory.getNodeMemory(this);
      Iterator it = memory.iterator();

      for (ObjectEntry entry = (ObjectEntry) it.next();
          entry != null;
          entry = (ObjectEntry) it.next()) {
        sink.assertObject((InternalFactHandle) entry.getValue(), context, workingMemory);
      }
    }
  }
Example #16
  /** Updates the given sink propagating all previously propagated tuples to it */
  public void updateSink(
      final LeftTupleSink sink,
      final PropagationContext context,
      final InternalWorkingMemory workingMemory) {
    final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory(this);
    Iterator it = memory.getRightTupleMemory().iterator();

    // Relies on the fact that any propagated LeftTuples are blocked, but due to lazy blocking
    // they will only be blocked once. So we can iterate the right memory to find the left tuples to
    // propagate
    for (RightTuple rightTuple = (RightTuple) it.next();
        rightTuple != null;
        rightTuple = (RightTuple) it.next()) {
      LeftTuple leftTuple = rightTuple.getBlocked();
      while (leftTuple != null) {
        sink.assertLeftTuple(sink.createLeftTuple(leftTuple, sink, true), context, workingMemory);
        leftTuple = leftTuple.getBlockedNext();
      }
    }
  }
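The iteration above works because each right tuple holds the head of a singly linked list of the left tuples it blocks (getBlocked()/getBlockedNext()). A toy sketch of that shape, purely illustrative and not the Drools tuple classes:
// Toy types: one right tuple points at the head of the list of left tuples it blocks.
final class ToyRightTuple {
  ToyLeftTuple blocked; // head of the blocked list

  void visitBlocked() {
    for (ToyLeftTuple lt = blocked; lt != null; lt = lt.blockedNext) {
      // each lt corresponds to a previously propagated, currently blocked left tuple
    }
  }
}

final class ToyLeftTuple {
  ToyLeftTuple blockedNext; // next left tuple blocked by the same right tuple
}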
Example #17
  public void updateSink(
      final LeftTupleSink sink,
      final PropagationContext context,
      final InternalWorkingMemory workingMemory) {
    final AccumulateMemory memory = (AccumulateMemory) workingMemory.getNodeMemory(this);

    final Iterator tupleIter = memory.betaMemory.getLeftTupleMemory().iterator();
    for (LeftTuple leftTuple = (LeftTuple) tupleIter.next();
        leftTuple != null;
        leftTuple = (LeftTuple) tupleIter.next()) {
      AccumulateContext accctx = (AccumulateContext) leftTuple.getObject();
      if (accctx.propagated) {
        // temporarily break the linked list to avoid wrong interactions
        LeftTuple[] matchings = splitList(leftTuple, accctx, true);
        sink.assertLeftTuple(
            sink.createLeftTuple(leftTuple, accctx.result, null, null, sink, true),
            context,
            workingMemory);
        restoreList(leftTuple, matchings);
      }
    }
  }
  void unHashSinks(final FieldIndex fieldIndex) {
    final int index = fieldIndex.getIndex();
    // keys of the sinks that need to be removed from the hashedSinkMap
    final List<HashKey> unhashedSinks = new ArrayList<HashKey>();

    final Iterator iter = this.hashedSinkMap.newIterator();
    ObjectHashMap.ObjectEntry entry = (ObjectHashMap.ObjectEntry) iter.next();

    while (entry != null) {
      final AlphaNode alphaNode = (AlphaNode) entry.getValue();
      final IndexableConstraint indexableConstraint =
          (IndexableConstraint) alphaNode.getConstraint();

      // only alpha nodes with an Operator.EQUAL constraint are in the hashedSinkMap, so we only
      // need to check that the field index matches
      if (index == indexableConstraint.getFieldExtractor().getIndex()) {
        final FieldValue value = indexableConstraint.getField();
        if (this.hashableSinks == null) {
          this.hashableSinks = new ObjectSinkNodeList();
        }
        this.hashableSinks.add(alphaNode);

        unhashedSinks.add(new HashKey(index, value, fieldIndex.getFieldExtractor()));
      }

      entry = (ObjectHashMap.ObjectEntry) iter.next();
    }

    for (HashKey hashKey : unhashedSinks) {
      this.hashedSinkMap.remove(hashKey);
    }

    if (this.hashedSinkMap.isEmpty()) {
      this.hashedSinkMap = null;
    }

    fieldIndex.setHashed(false);
  }
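unHashSinks() above undoes the optimization that hashing provides: alpha nodes with literal equality constraints on the same field are stored in a map keyed by (field index, value), so matching a fact costs one map probe instead of one evaluation per sink. A self-contained sketch of that idea with toy types, not the Drools classes:
// Toy illustration of the hashing optimization: sinks keyed by (field index, literal value),
// so one map probe replaces evaluating every "field == literal" alpha constraint in turn.
final class ToyHashedSinks {
  static final class Key {
    final int fieldIndex;
    final Object value;

    Key(final int fieldIndex, final Object value) {
      this.fieldIndex = fieldIndex;
      this.value = value;
    }

    @Override
    public boolean equals(final Object o) {
      return o instanceof Key
          && ((Key) o).fieldIndex == fieldIndex
          && java.util.Objects.equals(((Key) o).value, value);
    }

    @Override
    public int hashCode() {
      return 31 * fieldIndex + java.util.Objects.hashCode(value);
    }
  }

  private final java.util.Map<Key, Object> sinksByValue = new java.util.HashMap<Key, Object>();

  void hashSink(final int fieldIndex, final Object literal, final Object sink) {
    sinksByValue.put(new Key(fieldIndex, literal), sink);
  }

  Object lookup(final int fieldIndex, final Object factFieldValue) {
    return sinksByValue.get(new Key(fieldIndex, factFieldValue));
  }
}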
Example #19
  private ActivationIterator(InternalWorkingMemory wm, KnowledgeBase kbase) {
    this.wm = wm;

    nodeIter = TerminalNodeIterator.iterator(kbase);

    // Find the first node with Activations and set it.
    while (currentLeftTuple == null && (node = (TerminalNode) nodeIter.next()) != null) {
      if (!(node instanceof RuleTerminalNode)) {
        continue;
      }
      leftTupleIter = LeftTupleIterator.iterator(wm, node);
      this.currentLeftTuple = (LeftTuple) leftTupleIter.next();
    }
  }
  public void removeMemory(InternalWorkingMemory workingMemory) {
    BetaNode betaNode = (BetaNode) this.sink.getSinks()[0];

    Memory betaMemory = workingMemory.getNodeMemory(betaNode);
    BetaMemory bm;
    if (betaNode.getType() == NodeTypeEnums.AccumulateNode) {
      bm = ((AccumulateMemory) betaMemory).getBetaMemory();
    } else {
      bm = (BetaMemory) betaMemory;
    }

    if (bm.getRightTupleMemory().size() > 0) {
      final Iterator it = bm.getRightTupleMemory().iterator();
      for (RightTuple entry = (RightTuple) it.next();
          entry != null;
          entry = (RightTuple) it.next()) {
        LeftTuple leftTuple = (LeftTuple) entry.getFactHandle().getObject();
        leftTuple.unlinkFromLeftParent();
        leftTuple.unlinkFromRightParent();
      }
    }
    workingMemory.clearNodeMemory(this);
  }
Example #21
  public static void writeTruthMaintenanceSystem(MarshallerWriteContext context)
      throws IOException {
    ObjectOutputStream stream = context.stream;

    ObjectHashMap assertMap = context.wm.getTruthMaintenanceSystem().getAssertMap();

    EqualityKey[] keys = new EqualityKey[assertMap.size()];
    org.drools.core.util.Iterator it = assertMap.iterator();
    int i = 0;
    for (org.drools.core.util.ObjectHashMap.ObjectEntry entry =
            (org.drools.core.util.ObjectHashMap.ObjectEntry) it.next();
        entry != null;
        entry = (org.drools.core.util.ObjectHashMap.ObjectEntry) it.next()) {
      EqualityKey key = (EqualityKey) entry.getKey();
      keys[i++] = key;
    }

    Arrays.sort(keys, EqualityKeySorter.instance);

    // write the assert map of Equality keys
    for (EqualityKey key : keys) {
      stream.writeShort(PersisterEnums.EQUALITY_KEY);
      stream.writeInt(key.getStatus());
      InternalFactHandle handle = key.getFactHandle();
      stream.writeInt(handle.getId());
      // context.out.println( "EqualityKey int:" + key.getStatus() + " int:" + handle.getId() );
      if (key.getOtherFactHandle() != null && !key.getOtherFactHandle().isEmpty()) {
        for (InternalFactHandle handle2 : key.getOtherFactHandle()) {
          stream.writeShort(PersisterEnums.FACT_HANDLE);
          stream.writeInt(handle2.getId());
          // context.out.println( "OtherHandle int:" + handle2.getId() );
        }
      }
      stream.writeShort(PersisterEnums.END);
    }
    stream.writeShort(PersisterEnums.END);
  }
  @SuppressWarnings("unchecked")
  private static ProtobufMessages.NodeMemory writeFromNodeMemory(
      final int nodeId, final Memory memory) {
    FromMemory fromMemory = (FromMemory) memory;

    if (fromMemory.betaMemory.getLeftTupleMemory().size() > 0) {
      ProtobufMessages.NodeMemory.FromNodeMemory.Builder _from =
          ProtobufMessages.NodeMemory.FromNodeMemory.newBuilder();

      final org.drools.core.util.Iterator tupleIter =
          fromMemory.betaMemory.getLeftTupleMemory().iterator();
      for (LeftTuple leftTuple = (LeftTuple) tupleIter.next();
          leftTuple != null;
          leftTuple = (LeftTuple) tupleIter.next()) {
        Map<Object, RightTuple> matches = (Map<Object, RightTuple>) leftTuple.getObject();
        ProtobufMessages.NodeMemory.FromNodeMemory.FromContext.Builder _context =
            ProtobufMessages.NodeMemory.FromNodeMemory.FromContext.newBuilder()
                .setTuple(PersisterHelper.createTuple(leftTuple));
        for (RightTuple rightTuple : matches.values()) {
          FactHandle _handle =
              ProtobufMessages.FactHandle.newBuilder()
                  .setId(rightTuple.getFactHandle().getId())
                  .setRecency(rightTuple.getFactHandle().getRecency())
                  .build();
          _context.addHandle(_handle);
        }
        _from.addContext(_context.build());
      }

      return ProtobufMessages.NodeMemory.newBuilder()
          .setNodeId(nodeId)
          .setNodeType(ProtobufMessages.NodeMemory.NodeType.FROM)
          .setFrom(_from.build())
          .build();
    }
    return null;
  }
  private static ProtobufMessages.NodeMemory writeAccumulateNodeMemory(
      final int nodeId, final Memory memory) {
    // for accumulate nodes, we need to store the ID of created (result) handles
    AccumulateMemory accmem = (AccumulateMemory) memory;
    if (accmem.betaMemory.getLeftTupleMemory().size() > 0) {
      ProtobufMessages.NodeMemory.AccumulateNodeMemory.Builder _accumulate =
          ProtobufMessages.NodeMemory.AccumulateNodeMemory.newBuilder();

      final org.drools.core.util.Iterator tupleIter =
          accmem.betaMemory.getLeftTupleMemory().iterator();
      for (LeftTuple leftTuple = (LeftTuple) tupleIter.next();
          leftTuple != null;
          leftTuple = (LeftTuple) tupleIter.next()) {
        AccumulateContext accctx = (AccumulateContext) leftTuple.getObject();
        if (accctx.result != null) {
          FactHandle _handle =
              ProtobufMessages.FactHandle.newBuilder()
                  .setId(accctx.result.getFactHandle().getId())
                  .setRecency(accctx.result.getFactHandle().getRecency())
                  .build();
          _accumulate.addContext(
              ProtobufMessages.NodeMemory.AccumulateNodeMemory.AccumulateContext.newBuilder()
                  .setTuple(PersisterHelper.createTuple(leftTuple))
                  .setResultHandle(_handle)
                  .build());
        }
      }

      return ProtobufMessages.NodeMemory.newBuilder()
          .setNodeId(nodeId)
          .setNodeType(ProtobufMessages.NodeMemory.NodeType.ACCUMULATE)
          .setAccumulate(_accumulate.build())
          .build();
    }
    return null;
  }
Example #24
  public Object next() {
    Activation acc = null;
    if (this.currentLeftTuple != null) {
      acc = (Activation) currentLeftTuple.getObject();
      this.currentLeftTuple = (LeftTuple) leftTupleIter.next();

      while (currentLeftTuple == null && (node = (TerminalNode) nodeIter.next()) != null) {
        if (!(node instanceof RuleTerminalNode)) {
          continue;
        }
        leftTupleIter = LeftTupleIterator.iterator(wm, node);
        this.currentLeftTuple = (LeftTuple) leftTupleIter.next();
      }
    }

    return acc;
  }
  private static void writeAgenda(
      MarshallerWriteContext context, ProtobufMessages.RuleData.Builder _ksb) throws IOException {
    InternalWorkingMemory wm = context.wm;
    InternalAgenda agenda = (InternalAgenda) wm.getAgenda();

    org.drools.core.marshalling.impl.ProtobufMessages.Agenda.Builder _ab =
        ProtobufMessages.Agenda.newBuilder();

    AgendaGroup[] agendaGroups =
        (AgendaGroup[])
            agenda
                .getAgendaGroupsMap()
                .values()
                .toArray(new AgendaGroup[agenda.getAgendaGroupsMap().size()]);
    Arrays.sort(agendaGroups, AgendaGroupSorter.instance);
    for (AgendaGroup ag : agendaGroups) {
      AgendaGroupQueueImpl group = (AgendaGroupQueueImpl) ag;
      org.drools.core.marshalling.impl.ProtobufMessages.Agenda.AgendaGroup.Builder _agb =
          ProtobufMessages.Agenda.AgendaGroup.newBuilder();
      _agb.setName(group.getName())
          .setIsActive(group.isActive())
          .setIsAutoDeactivate(group.isAutoDeactivate())
          .setClearedForRecency(group.getClearedForRecency())
          .setHasRuleFlowLister(group.isRuleFlowListener())
          .setActivatedForRecency(group.getActivatedForRecency());

      Map<Long, String> nodeInstances = group.getNodeInstances();
      for (Map.Entry<Long, String> entry : nodeInstances.entrySet()) {
        org.drools.core.marshalling.impl.ProtobufMessages.Agenda.AgendaGroup.NodeInstance.Builder
            _nib = ProtobufMessages.Agenda.AgendaGroup.NodeInstance.newBuilder();
        _nib.setProcessInstanceId(entry.getKey());
        _nib.setNodeInstanceId(entry.getValue());
        _agb.addNodeInstance(_nib.build());
      }

      _ab.addAgendaGroup(_agb.build());
    }

    org.drools.core.marshalling.impl.ProtobufMessages.Agenda.FocusStack.Builder _fsb =
        ProtobufMessages.Agenda.FocusStack.newBuilder();
    LinkedList<AgendaGroup> focusStack = agenda.getStackList();
    for (Iterator<AgendaGroup> it = focusStack.iterator(); it.hasNext(); ) {
      AgendaGroup group = it.next();
      _fsb.addGroupName(group.getName());
    }
    _ab.setFocusStack(_fsb.build());

    // serialize all dormant activations
    org.drools.core.util.Iterator it = ActivationIterator.iterator(wm);
    List<org.drools.core.spi.Activation> dormant = new ArrayList<org.drools.core.spi.Activation>();
    for (org.drools.core.spi.Activation item = (org.drools.core.spi.Activation) it.next();
        item != null;
        item = (org.drools.core.spi.Activation) it.next()) {
      if (!item.isQueued()) {
        dormant.add(item);
      }
    }
    Collections.sort(dormant, ActivationsSorter.INSTANCE);
    for (org.drools.core.spi.Activation activation : dormant) {
      _ab.addMatch(writeActivation(context, (AgendaItem) activation));
    }

    // serialize all network evaluator activations
    for (Activation activation : agenda.getActivations()) {
      if (activation.isRuleAgendaItem()) {
        // serialize it
        _ab.addRuleActivation(writeActivation(context, (AgendaItem) activation));
      }
    }

    _ksb.setAgenda(_ab.build());
  }
  public static void writeTruthMaintenanceSystem(
      MarshallerWriteContext context, ProtobufMessages.RuleData.Builder _session)
      throws IOException {
    ObjectHashMap assertMap = context.wm.getTruthMaintenanceSystem().getAssertMap();
    ObjectHashMap justifiedMap = context.wm.getTruthMaintenanceSystem().getJustifiedMap();

    if (!assertMap.isEmpty() || !justifiedMap.isEmpty()) {
      EqualityKey[] keys = new EqualityKey[assertMap.size()];
      org.drools.core.util.Iterator it = assertMap.iterator();
      int i = 0;
      for (org.drools.core.util.ObjectHashMap.ObjectEntry entry =
              (org.drools.core.util.ObjectHashMap.ObjectEntry) it.next();
          entry != null;
          entry = (org.drools.core.util.ObjectHashMap.ObjectEntry) it.next()) {
        EqualityKey key = (EqualityKey) entry.getKey();
        keys[i++] = key;
      }

      Arrays.sort(keys, EqualityKeySorter.instance);

      ProtobufMessages.TruthMaintenanceSystem.Builder _tms =
          ProtobufMessages.TruthMaintenanceSystem.newBuilder();

      // write the assert map of Equality keys
      for (EqualityKey key : keys) {
        ProtobufMessages.EqualityKey.Builder _key = ProtobufMessages.EqualityKey.newBuilder();
        _key.setStatus(key.getStatus());
        _key.setHandleId(key.getFactHandle().getId());
        if (key.getOtherFactHandle() != null && !key.getOtherFactHandle().isEmpty()) {
          for (InternalFactHandle handle : key.getOtherFactHandle()) {
            _key.addOtherHandle(handle.getId());
          }
        }
        _tms.addKey(_key.build());
      }

      it = justifiedMap.iterator();
      i = 0;
      for (org.drools.core.util.ObjectHashMap.ObjectEntry entry =
              (org.drools.core.util.ObjectHashMap.ObjectEntry) it.next();
          entry != null;
          entry = (org.drools.core.util.ObjectHashMap.ObjectEntry) it.next()) {
        ProtobufMessages.Justification.Builder _justification =
            ProtobufMessages.Justification.newBuilder();
        _justification.setHandleId(((Integer) entry.getKey()).intValue());

        org.drools.core.util.LinkedList list = (org.drools.core.util.LinkedList) entry.getValue();
        for (LinkedListEntry node = (LinkedListEntry) list.getFirst();
            node != null;
            node = (LinkedListEntry) node.getNext()) {
          LogicalDependency dependency = (LogicalDependency) node.getObject();
          org.drools.spi.Activation activation = dependency.getJustifier();
          ProtobufMessages.Activation _activation =
              ProtobufMessages.Activation.newBuilder()
                  .setPackageName(activation.getRule().getPackage())
                  .setRuleName(activation.getRule().getName())
                  .setTuple(PersisterHelper.createTuple(activation.getTuple()))
                  .build();
          _justification.addActivation(_activation);
        }
        _tms.addJustification(_justification.build());
      }
      _session.setTms(_tms.build());
    }
  }