/**
   * Retract the <code>FactHandleImpl</code> from the <code>Rete</code> network. Also remove the
   * <code>FactHandleImpl</code> from the node memory.
   *
   * @param factHandle The fact handle to retract.
   * @param context The propagation context.
   * @param workingMemory The working memory session.
   */
  public void retractObject(
      final InternalFactHandle factHandle,
      final PropagationContext context,
      final InternalWorkingMemory workingMemory) {

    if (context.getType() == PropagationContext.MODIFICATION
        && this.skipOnModify
        && context.getDormantActivations() == 0) {
      return;
    }

    if (this.objectMemoryEnabled) {
      final ObjectHashSet memory = (ObjectHashSet) workingMemory.getNodeMemory(this);
      memory.remove(factHandle);
    }

    for (RightTuple rightTuple = factHandle.getRightTuple();
        rightTuple != null;
        rightTuple = (RightTuple) rightTuple.getHandleNext()) {
      rightTuple.getRightTupleSink().retractRightTuple(rightTuple, context, workingMemory);
    }
    factHandle.setRightTuple(null);

    for (LeftTuple leftTuple = factHandle.getLeftTuple();
        leftTuple != null;
        leftTuple = (LeftTuple) leftTuple.getLeftParentNext()) {
      leftTuple.getLeftTupleSink().retractLeftTuple(leftTuple, context, workingMemory);
    }
    factHandle.setLeftTuple(null);
  }
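
The example above walks the two chains hanging off the fact handle: right tuples via getHandleNext() and left tuples via getLeftParentNext(), notifying each sink before the handle's references are cleared. As a minimal sketch of the same traversal, the illustrative helper below (not part of the Drools API, and assuming the org.drools internal types InternalFactHandle and LeftTuple used throughout these examples) counts the left tuples attached to a handle with the getFirstLeftTuple() accessor that the later examples rely on:

  // Illustrative helper, not part of Drools: counts the LeftTuples chained off a fact handle
  // using the same getLeftParentNext() traversal as retractObject above.
  public static int countLeftTuples(final InternalFactHandle factHandle) {
    int count = 0;
    for (LeftTuple leftTuple = factHandle.getFirstLeftTuple();
        leftTuple != null;
        leftTuple = (LeftTuple) leftTuple.getLeftParentNext()) {
      count++;
    }
    return count;
  }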
Example #2
  public static void writeLeftTuples(
      MarshallerWriteContext context, InternalFactHandle[] factHandles) throws IOException {
    ObjectOutputStream stream = context.stream;
    InternalWorkingMemory wm = context.wm;

    // Write out LeftTuples
    // context.out.println( "LeftTuples Start" );
    for (InternalFactHandle handle : factHandles) {
      // InternalFactHandle handle = (InternalFactHandle) it.next();

      for (LeftTuple leftTuple = handle.getFirstLeftTuple();
          leftTuple != null;
          leftTuple = (LeftTuple) leftTuple.getLeftParentNext()) {
        stream.writeShort(PersisterEnums.LEFT_TUPLE);
        int sinkId = leftTuple.getLeftTupleSink().getId();
        stream.writeInt(sinkId);
        stream.writeInt(handle.getId());

        // context.out.println( "LeftTuple sinkId:" + leftTuple.getLeftTupleSink().getId() + "
        // handleId:" + handle.getId() );
        writeLeftTuple(leftTuple, context, true);
      }
    }

    stream.writeShort(PersisterEnums.END);
    // context.out.println( "LeftTuples End" );
  }
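
writeLeftTuples serializes a flat stream of tagged records: each record starts with PersisterEnums.LEFT_TUPLE followed by the sink and handle ids, and the stream is closed with a PersisterEnums.END sentinel so a reader can loop on the tag until the sentinel appears. The self-contained sketch below shows that convention in isolation; LEFT_TUPLE and END here are stand-in constants, not the real PersisterEnums values:

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

// Illustrative sketch of the tag-then-sentinel record stream used by the marshaller code above.
class RecordStreamSketch {
  static final short LEFT_TUPLE = 1; // stand-in for PersisterEnums.LEFT_TUPLE
  static final short END = 0;        // stand-in for PersisterEnums.END

  static void write(ObjectOutputStream stream, int[] sinkIds) throws IOException {
    for (int sinkId : sinkIds) {
      stream.writeShort(LEFT_TUPLE); // tag the record
      stream.writeInt(sinkId);       // record payload
    }
    stream.writeShort(END);          // terminate the stream
  }

  static void read(ObjectInputStream stream) throws IOException {
    for (short tag = stream.readShort(); tag != END; tag = stream.readShort()) {
      int sinkId = stream.readInt(); // payload must be read in the same order it was written
      System.out.println("sinkId=" + sinkId);
    }
  }
}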
Example #3
 /**
  * OTN needs to override remove to avoid releasing the node ID, since OTNs are never removed from
  * the rulebase in the current implementation.
  */
 protected void doRemove(
     final RuleRemovalContext context,
     final ReteooBuilder builder,
     final BaseNode node,
     final InternalWorkingMemory[] workingMemories) {
   if (context.getCleanupAdapter() != null) {
     for (InternalWorkingMemory workingMemory : workingMemories) {
       CleanupAdapter adapter = context.getCleanupAdapter();
       final ObjectHashSet memory = (ObjectHashSet) workingMemory.getNodeMemory(this);
       Iterator it = memory.iterator();
       for (ObjectEntry entry = (ObjectEntry) it.next();
           entry != null;
           entry = (ObjectEntry) it.next()) {
         InternalFactHandle handle = (InternalFactHandle) entry.getValue();
         for (LeftTuple leftTuple = handle.getFirstLeftTuple();
             leftTuple != null;
             leftTuple = leftTuple.getLeftParentNext()) {
           adapter.cleanUp(leftTuple, workingMemory);
         }
       }
     }
     context.setCleanupAdapter(null);
   }
   if (!node.isInUse()) {
     removeObjectSink((ObjectSink) node);
   }
 }
 public void propagateModifyChildLeftTuple(
     LeftTuple leftTuple,
     PropagationContext context,
     InternalWorkingMemory workingMemory,
     boolean tupleMemoryEnabled) {
   for (LeftTuple childLeftTuple = leftTuple.getFirstChild();
       childLeftTuple != null;
       childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
     childLeftTuple.getLeftTupleSink().modifyLeftTuple(childLeftTuple, context, workingMemory);
   }
 }
 public void propagateRetractLeftTuple(
     final LeftTuple leftTuple,
     final PropagationContext context,
     final InternalWorkingMemory workingMemory) {
   LeftTuple child = leftTuple.getFirstChild();
   while (child != null) {
     LeftTuple temp = child.getLeftParentNext();
     doPropagateRetractLeftTuple(context, workingMemory, child, child.getLeftTupleSink());
     child.unlinkFromRightParent();
     child.unlinkFromLeftParent();
     child = temp;
   }
 }
 public void propagateRetractLeftTupleDestroyRightTuple(
     final LeftTuple leftTuple,
     final PropagationContext context,
     final InternalWorkingMemory workingMemory) {
   LeftTuple child = leftTuple.getFirstChild();
   // the right parent handle is captured from the first child (assumed to exist) before the
   // children are unlinked, so it can be destroyed after the loop
   InternalFactHandle rightParent = child.getRightParent().getFactHandle();
   while (child != null) {
     LeftTuple temp = child.getLeftParentNext();
     doPropagateRetractLeftTuple(context, workingMemory, child, child.getLeftTupleSink());
     child.unlinkFromRightParent();
     child.unlinkFromLeftParent();
     child = temp;
   }
   workingMemory.getFactHandleFactory().destroyFactHandle(rightParent);
 }
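
Both retract loops above use the same idiom: the successor is saved in temp before the child is unlinked, because unlinkFromLeftParent() would otherwise cut the iteration off from the rest of the chain. A minimal, self-contained sketch of that idiom follows; the Node type is hypothetical and only stands in for the intrusive tuple links:

// Hypothetical intrusive list node used only to illustrate the "save next, then unlink" idiom.
class Node {
  Node next;

  void unlink() { // stands in for unlinkFromLeftParent()/unlinkFromRightParent()
    next = null;
  }
}

class SafeRemovalSketch {
  static void removeAll(Node head) {
    Node child = head;
    while (child != null) {
      Node temp = child.next; // capture the successor first
      child.unlink();         // now it is safe to detach the current node
      child = temp;
    }
  }
}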
Example #7
  /**
   * Skips the propagated tuple handles and returns the first handle in the list that corresponds
   * to a match
   *
   * @param leftTuple the left tuple whose children are inspected
   * @param accctx the accumulate context holding the propagation state for the tuple
   * @return the first child tuple that corresponds to a match, or null if there is none
   */
  private LeftTuple getFirstMatch(
      final LeftTuple leftTuple, final AccumulateContext accctx, final boolean isUpdatingSink) {
    // start from the first child of the left tuple
    LeftTuple child = leftTuple.getFirstChild();

    if (accctx.propagated) {
      // the tuple was already propagated, so we need to skip the first N children,
      // which are in fact the propagated tuples
      int target = isUpdatingSink ? this.sink.size() - 1 : this.sink.size();
      for (int i = 0; i < target; i++) {
        child = child.getLeftParentNext();
      }
    }
    return child;
  }
Example #8
 private void removePreviousMatchesForLeftTuple(
     final LeftTuple leftTuple,
     final InternalWorkingMemory workingMemory,
     final AccumulateMemory memory,
     final AccumulateContext accctx) {
   // all previous matches are being removed, so we just split the list, keeping the head
   LeftTuple[] matchings = splitList(leftTuple, accctx, false);
   for (LeftTuple match = matchings[0]; match != null; match = match.getLeftParentNext()) {
     // can't unlink from the left parent as it was already unlinked during the splitList call
     // above
     match.unlinkFromRightParent();
   }
   // since there are no more matches, the following call will just re-initialize the accumulation
   this.accumulate.init(memory.workingMemoryContext, accctx.context, leftTuple, workingMemory);
 }
  private static ProtobufMessages.NodeMemory writeQueryElementNodeMemory(
      final int nodeId, final Memory memory, final InternalWorkingMemory wm) {
    LeftTupleIterator it = LeftTupleIterator.iterator(wm, ((QueryElementNodeMemory) memory).node);

    ProtobufMessages.NodeMemory.QueryElementNodeMemory.Builder _query =
        ProtobufMessages.NodeMemory.QueryElementNodeMemory.newBuilder();
    for (LeftTuple leftTuple = (LeftTuple) it.next();
        leftTuple != null;
        leftTuple = (LeftTuple) it.next()) {
      InternalFactHandle handle = (InternalFactHandle) leftTuple.getObject();
      FactHandle _handle =
          ProtobufMessages.FactHandle.newBuilder()
              .setId(handle.getId())
              .setRecency(handle.getRecency())
              .build();

      ProtobufMessages.NodeMemory.QueryElementNodeMemory.QueryContext.Builder _context =
          ProtobufMessages.NodeMemory.QueryElementNodeMemory.QueryContext.newBuilder()
              .setTuple(PersisterHelper.createTuple(leftTuple))
              .setHandle(_handle);

      LeftTuple childLeftTuple = leftTuple.getFirstChild();
      while (childLeftTuple != null) {
        RightTuple rightParent = childLeftTuple.getRightParent();
        _context.addResult(
            ProtobufMessages.FactHandle.newBuilder()
                .setId(rightParent.getFactHandle().getId())
                .setRecency(rightParent.getFactHandle().getRecency())
                .build());
        while (childLeftTuple != null && childLeftTuple.getRightParent() == rightParent) {
          // skip to the next child that has a different right parent
          childLeftTuple = childLeftTuple.getLeftParentNext();
        }
      }
      _query.addContext(_context.build());
    }

    return _query.getContextCount() > 0
        ? ProtobufMessages.NodeMemory.newBuilder()
            .setNodeId(nodeId)
            .setNodeType(ProtobufMessages.NodeMemory.NodeType.QUERY_ELEMENT)
            .setQueryElement(_query.build())
            .build()
        : null;
  }
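
The inner while loop above walks the children of a left tuple and collapses consecutive children that share the same right parent into a single result entry. The stand-alone sketch below illustrates the same grouping walk over plain arrays; all names in it are illustrative only:

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch: group consecutive entries that share a key, mirroring the
// "skip to the next child that has a different right parent" loop above.
class GroupConsecutiveSketch {
  static List<List<String>> group(int[] keys, String[] values) {
    List<List<String>> groups = new ArrayList<>();
    int i = 0;
    while (i < keys.length) {
      int key = keys[i];
      List<String> group = new ArrayList<>();
      while (i < keys.length && keys[i] == key) { // advance until the key changes
        group.add(values[i]);
        i++;
      }
      groups.add(group);
    }
    return groups;
  }
}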
Example #10
  public static void writeInitialFactHandleLeftTuples(MarshallerWriteContext context)
      throws IOException {
    ObjectOutputStream stream = context.stream;

    // context.out.println( "InitialFact LeftTuples Start" );
    InternalFactHandle handle = context.wm.getInitialFactHandle();
    for (LeftTuple leftTuple = handle.getFirstLeftTuple();
        leftTuple != null;
        leftTuple = (LeftTuple) leftTuple.getLeftParentNext()) {
      stream.writeShort(PersisterEnums.LEFT_TUPLE);

      stream.writeInt(leftTuple.getLeftTupleSink().getId());
      // context.out.println( "LeftTuple sinkId:" + leftTuple.getLeftTupleSink().getId() );
      writeLeftTuple(leftTuple, context, true);
    }
    stream.writeShort(PersisterEnums.END);
    // context.out.println( "InitialFact LeftTuples End" );
  }
 public LeftTuple propagateRetractChildLeftTuple(
     LeftTuple childLeftTuple,
     RightTuple parentRightTuple,
     PropagationContext context,
     InternalWorkingMemory workingMemory) {
   // iterate to find all child tuples for the shared node
   while (childLeftTuple != null && childLeftTuple.getRightParent() == parentRightTuple) {
      // this will iterate for each child node when the current node is shared
     LeftTuple temp = childLeftTuple.getLeftParentNext();
     doPropagateRetractLeftTuple(
         context, workingMemory, childLeftTuple, childLeftTuple.getLeftTupleSink());
     childLeftTuple.unlinkFromRightParent();
     childLeftTuple.unlinkFromLeftParent();
     childLeftTuple = temp;
   }
   return childLeftTuple;
 }
  public LeftTuple propagateModifyChildLeftTuple(
      LeftTuple childLeftTuple,
      RightTuple parentRightTuple,
      PropagationContext context,
      InternalWorkingMemory workingMemory,
      boolean tupleMemoryEnabled) {
    // iterate to find all child tuples for the shared node
    while (childLeftTuple != null && childLeftTuple.getRightParent() == parentRightTuple) {
      // this will iterate for each child node when the current node is shared

      // preserve the current LeftTuple, as we need to iterate to the next before re-adding
      LeftTuple temp = childLeftTuple;
      childLeftTuple.getLeftTupleSink().modifyLeftTuple(childLeftTuple, context, workingMemory);
      childLeftTuple = childLeftTuple.getLeftParentNext();
      temp.reAddRight();
    }
    return childLeftTuple;
  }
Example #13
 private void reaccumulateForLeftTuple(
     final LeftTuple leftTuple,
     final InternalWorkingMemory workingMemory,
     final AccumulateMemory memory,
     final AccumulateContext accctx) {
   this.accumulate.init(memory.workingMemoryContext, accctx.context, leftTuple, workingMemory);
   for (LeftTuple childMatch = getFirstMatch(leftTuple, accctx, false);
       childMatch != null;
       childMatch = childMatch.getLeftParentNext()) {
     InternalFactHandle childHandle = childMatch.getRightParent().getFactHandle();
     LeftTuple tuple = leftTuple;
     if (this.unwrapRightObject) {
       tuple = (LeftTuple) childHandle.getObject();
       childHandle = tuple.getLastHandle();
     }
     this.accumulate.accumulate(
         memory.workingMemoryContext, accctx.context, tuple, childHandle, workingMemory);
   }
 }
  /** Retracts the corresponding tuple by retrieving and retracting the fact created for it */
  public void retractLeftTuple(
      final LeftTuple tuple,
      final PropagationContext context,
      final InternalWorkingMemory workingMemory) {
    final ObjectHashMap memory = (ObjectHashMap) workingMemory.getNodeMemory(this);
    // retrieve handle from memory
    final InternalFactHandle factHandle = (InternalFactHandle) memory.remove(tuple);

    for (RightTuple rightTuple = factHandle.getFirstRightTuple();
        rightTuple != null;
        rightTuple = (RightTuple) rightTuple.getHandleNext()) {
      rightTuple.getRightTupleSink().retractRightTuple(rightTuple, context, workingMemory);
    }
    factHandle.clearRightTuples();

    for (LeftTuple leftTuple = factHandle.getLastLeftTuple();
        leftTuple != null;
        leftTuple = (LeftTuple) leftTuple.getLeftParentNext()) {
      leftTuple.getLeftTupleSink().retractLeftTuple(leftTuple, context, workingMemory);
    }
    factHandle.clearLeftTuples();
  }
Example #15
  /**
   * Retract the <code>FactHandleImpl</code> from the <code>Rete</code> network. Also remove the
   * <code>FactHandleImpl</code> from the node memory.
   *
   * @param factHandle The fact handle to retract.
   * @param context The propagation context.
   * @param workingMemory The working memory session.
   */
  public void retractObject(
      final InternalFactHandle factHandle,
      final PropagationContext context,
      final InternalWorkingMemory workingMemory) {
    if (objectMemoryEnabled && !(queryNode && !((DroolsQuery) factHandle.getObject()).isOpen())) {
      final ObjectHashSet memory = (ObjectHashSet) workingMemory.getNodeMemory(this);
      memory.remove(factHandle);
    }

    for (RightTuple rightTuple = factHandle.getFirstRightTuple();
        rightTuple != null;
        rightTuple = (RightTuple) rightTuple.getHandleNext()) {
      rightTuple.getRightTupleSink().retractRightTuple(rightTuple, context, workingMemory);
    }
    factHandle.clearRightTuples();

    for (LeftTuple leftTuple = factHandle.getFirstLeftTuple();
        leftTuple != null;
        leftTuple = (LeftTuple) leftTuple.getLeftParentNext()) {
      leftTuple.getLeftTupleSink().retractLeftTuple(leftTuple, context, workingMemory);
    }
    factHandle.clearLeftTuples();
  }
Example #16
  public void updateSink(
      final LeftTupleSink sink,
      final PropagationContext context,
      final InternalWorkingMemory workingMemory) {
    LeftTupleIterator it = LeftTupleIterator.iterator(workingMemory, this);

    for (LeftTuple leftTuple = (LeftTuple) it.next();
        leftTuple != null;
        leftTuple = (LeftTuple) it.next()) {
      LeftTuple childLeftTuple = leftTuple.getFirstChild();
      while (childLeftTuple != null) {
        RightTuple rightParent = childLeftTuple.getRightParent();
        sink.assertLeftTuple(
            sink.createLeftTuple(leftTuple, rightParent, childLeftTuple, null, sink, true),
            context,
            workingMemory);

        while (childLeftTuple != null && childLeftTuple.getRightParent() == rightParent) {
          // skip to the next child that has a different right parent
          childLeftTuple = childLeftTuple.getLeftParentNext();
        }
      }
    }
  }
Example #17
  @Test
  public void testDynamicAddRule() {
    // DROOLS-17
    String str =
        "import org.drools.integrationtests.MiscTest2.A\n"
            + "rule r1 when\n"
            + "    $a : A( f1 == 1 )\n"
            + "then\n"
            + "end\n"
            + "\n"
            + "rule r2 when\n"
            + "    $a : A( f2 == 1 )\n"
            + "then\n"
            + "end\n"
            + "\n"
            + "rule r3 when\n"
            + "    $a : A( f3 == 1 )"
            + "then\n"
            + "end";

    String str2 =
        "import org.drools.integrationtests.MiscTest2.A\n"
            + "rule r4 when\n"
            + "    $a : A( f2 == 1, f4 == 1 )"
            + "then\n"
            + "end";

    KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
    kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL);

    if (kbuilder.hasErrors()) {
      fail(kbuilder.getErrors().toString());
    }

    KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
    kbase.addKnowledgePackages(kbuilder.getKnowledgePackages());

    StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
    FactHandle fh = ksession.insert(new A(1, 1, 1, 1));

    ksession.fireAllRules();

    kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
    kbuilder.add(ResourceFactory.newByteArrayResource(str2.getBytes()), ResourceType.DRL);

    if (kbuilder.hasErrors()) {
      fail(kbuilder.getErrors().toString());
    }
    kbase.addKnowledgePackages(kbuilder.getKnowledgePackages());

    ksession.fireAllRules();

    // this second insert forces the regeneration of the otnIds
    ksession.insert(new A(2, 2, 2, 2));

    LeftTuple leftTuple = ((DefaultFactHandle) fh).getFirstLeftTuple();
    ObjectTypeNode.Id leftTupleOtnId = leftTuple.getLeftTupleSink().getLeftInputOtnId();
    leftTuple = leftTuple.getLeftParentNext();
    while (leftTuple != null) {
      assertTrue(leftTupleOtnId.before(leftTuple.getLeftTupleSink().getLeftInputOtnId()));
      leftTupleOtnId = leftTuple.getLeftTupleSink().getLeftInputOtnId();
      leftTuple = leftTuple.getLeftParentNext();
    }
  }
Example #18
  public static void writeLeftTuple(
      LeftTuple leftTuple, MarshallerWriteContext context, boolean recurse) throws IOException {
    ObjectOutputStream stream = context.stream;
    InternalRuleBase ruleBase = context.ruleBase;
    InternalWorkingMemory wm = context.wm;

    LeftTupleSink sink = leftTuple.getLeftTupleSink();

    switch (sink.getType()) {
      case NodeTypeEnums.JoinNode:
        {
          // context.out.println( "JoinNode" );
          for (LeftTuple childLeftTuple = leftTuple.getFirstChild();
              childLeftTuple != null;
              childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
            stream.writeShort(PersisterEnums.RIGHT_TUPLE);
            int childSinkId = childLeftTuple.getLeftTupleSink().getId();
            stream.writeInt(childSinkId);
            stream.writeInt(childLeftTuple.getRightParent().getFactHandle().getId());
            // context.out.println( "RightTuple int:" + childLeftTuple.getLeftTupleSink().getId() +
            // " int:" + childLeftTuple.getRightParent().getFactHandle().getId() );
            writeLeftTuple(childLeftTuple, context, recurse);
          }
          stream.writeShort(PersisterEnums.END);
          // context.out.println( "JoinNode   ---   END" );
          break;
        }
      case NodeTypeEnums.QueryRiaFixerNode:
      case NodeTypeEnums.EvalConditionNode:
        {
          // context.out.println( ".... EvalConditionNode" );
          for (LeftTuple childLeftTuple = leftTuple.getFirstChild();
              childLeftTuple != null;
              childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
            stream.writeShort(PersisterEnums.LEFT_TUPLE);
            stream.writeInt(childLeftTuple.getLeftTupleSink().getId());
            writeLeftTuple(childLeftTuple, context, recurse);
          }
          stream.writeShort(PersisterEnums.END);
          // context.out.println( "---- EvalConditionNode   ---   END" );
          break;
        }
      case NodeTypeEnums.NotNode:
      case NodeTypeEnums.ForallNotNode:
        {
          if (leftTuple.getBlocker() == null) {
            // is not blocked so has children
            stream.writeShort(PersisterEnums.LEFT_TUPLE_NOT_BLOCKED);

            for (LeftTuple childLeftTuple = leftTuple.getFirstChild();
                childLeftTuple != null;
                childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
              stream.writeShort(PersisterEnums.LEFT_TUPLE);
              stream.writeInt(childLeftTuple.getLeftTupleSink().getId());
              writeLeftTuple(childLeftTuple, context, recurse);
            }
            stream.writeShort(PersisterEnums.END);

          } else {
            stream.writeShort(PersisterEnums.LEFT_TUPLE_BLOCKED);
            stream.writeInt(leftTuple.getBlocker().getFactHandle().getId());
          }
          break;
        }
      case NodeTypeEnums.ExistsNode:
        {
          if (leftTuple.getBlocker() == null) {
            // is not blocked, so it has no children to write
            stream.writeShort(PersisterEnums.LEFT_TUPLE_NOT_BLOCKED);
          } else {
            stream.writeShort(PersisterEnums.LEFT_TUPLE_BLOCKED);
            stream.writeInt(leftTuple.getBlocker().getFactHandle().getId());

            for (LeftTuple childLeftTuple = leftTuple.getFirstChild();
                childLeftTuple != null;
                childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
              stream.writeShort(PersisterEnums.LEFT_TUPLE);
              stream.writeInt(childLeftTuple.getLeftTupleSink().getId());
              writeLeftTuple(childLeftTuple, context, recurse);
            }
            stream.writeShort(PersisterEnums.END);
          }
          break;
        }
      case NodeTypeEnums.AccumulateNode:
        {
          // context.out.println( ".... AccumulateNode" );
          // accumulate nodes generate new facts on-demand and need special procedures when
          // serializing to persistent storage
          AccumulateMemory memory = (AccumulateMemory) context.wm.getNodeMemory((BetaNode) sink);
          AccumulateContext accctx = (AccumulateContext) leftTuple.getObject();
          // first we serialize the generated fact handle
          writeFactHandle(
              context,
              stream,
              context.objectMarshallingStrategyStore,
              accctx.result.getFactHandle());
          // then we serialize the associated accumulation context
          stream.writeObject(accctx.context);
          // then we serialize the boolean propagated flag
          stream.writeBoolean(accctx.propagated);

          // then we serialize all the propagated tuples
          for (LeftTuple childLeftTuple = leftTuple.getFirstChild();
              childLeftTuple != null;
              childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
            if (leftTuple.getLeftTupleSink().getId() == childLeftTuple.getLeftTupleSink().getId()) {
              // this is a matching record, so, associate the right tuples
              // context.out.println( "RightTuple(match) int:" +
              // childLeftTuple.getLeftTupleSink().getId() + " int:" +
              // childLeftTuple.getRightParent().getFactHandle().getId() );
              stream.writeShort(PersisterEnums.RIGHT_TUPLE);
              stream.writeInt(childLeftTuple.getRightParent().getFactHandle().getId());
            } else {
              // this is a propagation record
              // context.out.println( "RightTuple(propagation) int:" +
              // childLeftTuple.getLeftTupleSink().getId() + " int:" +
              // childLeftTuple.getRightParent().getFactHandle().getId() );
              stream.writeShort(PersisterEnums.LEFT_TUPLE);
              int sinkId = childLeftTuple.getLeftTupleSink().getId();
              stream.writeInt(sinkId);
              writeLeftTuple(childLeftTuple, context, recurse);
            }
          }
          stream.writeShort(PersisterEnums.END);
          // context.out.println( "---- AccumulateNode   ---   END" );
          break;
        }
      case NodeTypeEnums.RightInputAdaterNode:
        {
          // context.out.println( ".... RightInputAdapterNode" );
          // RIANs generate new fact handles on-demand to wrap tuples and need special procedures
          // when serializing to persistent storage
          ObjectHashMap memory = (ObjectHashMap) context.wm.getNodeMemory((NodeMemory) sink);
          InternalFactHandle ifh = (InternalFactHandle) memory.get(leftTuple);
          // first we serialize the generated fact handle ID
          // context.out.println( "FactHandle id:"+ifh.getId() );
          stream.writeInt(ifh.getId());
          stream.writeLong(ifh.getRecency());

          writeRightTuples(ifh, context);

          stream.writeShort(PersisterEnums.END);
          // context.out.println( "---- RightInputAdapterNode   ---   END" );
          break;
        }
      case NodeTypeEnums.FromNode:
        {
          // context.out.println( ".... FromNode" );
          // FNs generate new fact handles on-demand to wrap objects and need special procedures
          // when serializing to persistent storage
          FromMemory memory = (FromMemory) context.wm.getNodeMemory((NodeMemory) sink);

          Map<Object, RightTuple> matches = (Map<Object, RightTuple>) leftTuple.getObject();
          for (RightTuple rightTuples : matches.values()) {
            // first we serialize the generated fact handle ID
            stream.writeShort(PersisterEnums.FACT_HANDLE);
            writeFactHandle(
                context,
                stream,
                context.objectMarshallingStrategyStore,
                rightTuples.getFactHandle());
            writeRightTuples(rightTuples.getFactHandle(), context);
          }
          stream.writeShort(PersisterEnums.END);
          for (LeftTuple childLeftTuple = leftTuple.getFirstChild();
              childLeftTuple != null;
              childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
            stream.writeShort(PersisterEnums.RIGHT_TUPLE);
            stream.writeInt(childLeftTuple.getLeftTupleSink().getId());
            stream.writeInt(childLeftTuple.getRightParent().getFactHandle().getId());
            // context.out.println( "RightTuple int:" + childLeftTuple.getLeftTupleSink().getId() +
            // " int:" + childLeftTuple.getRightParent().getFactHandle().getId() );
            writeLeftTuple(childLeftTuple, context, recurse);
          }
          stream.writeShort(PersisterEnums.END);
          // context.out.println( "---- FromNode   ---   END" );
          break;
        }
      case NodeTypeEnums.UnificationNode:
        {
          // context.out.println( ".... UnificationNode" );

          QueryElementNode node = (QueryElementNode) sink;
          boolean isOpen = node.isOpenQuery();

          context.writeBoolean(isOpen);
          if (isOpen) {
            InternalFactHandle factHandle = (InternalFactHandle) leftTuple.getObject();
            DroolsQuery query = (DroolsQuery) factHandle.getObject();

            // context.out.println( "factHandle:" +  factHandle );

            factHandle.setObject(null);
            writeFactHandle(context, stream, context.objectMarshallingStrategyStore, 0, factHandle);
            factHandle.setObject(query);
            writeLeftTuples(context, new InternalFactHandle[] {factHandle});
          } else {
            for (LeftTuple childLeftTuple = leftTuple.getFirstChild();
                childLeftTuple != null;
                childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
              stream.writeShort(PersisterEnums.LEFT_TUPLE);
              stream.writeInt(childLeftTuple.getLeftTupleSink().getId());
              InternalFactHandle factHandle = childLeftTuple.getLastHandle();
              writeFactHandle(
                  context, stream, context.objectMarshallingStrategyStore, 1, factHandle);
              writeLeftTuple(childLeftTuple, context, recurse);
            }
            stream.writeShort(PersisterEnums.END);
          }
          // context.out.println( "---- EvalConditionNode   ---   END" );
          break;
        }
      case NodeTypeEnums.RuleTerminalNode:
        {
          // context.out.println( "RuleTerminalNode" );
          int pos = context.terminalTupleMap.size();
          context.terminalTupleMap.put(leftTuple, pos);
          break;
        }
      case NodeTypeEnums.QueryTerminalNode:
        {
          // context.out.println( ".... QueryTerminalNode" );
          //                LeftTuple entry = leftTuple;
          //
          //                // find the DroolsQuery object
          //                while ( entry.getParent() != null ) {
          //                    entry = entry.getParent();
          //                }
          //
          //                // Now output all the child tuples in the caller network
          //                DroolsQuery query = (DroolsQuery) entry.getLastHandle().getObject();
          //                if ( query.getQueryResultCollector() instanceof
          // UnificationNodeViewChangedEventListener ) {
          //                    context.writeBoolean( true );
          //                    UnificationNodeViewChangedEventListener collector =
          // (UnificationNodeViewChangedEventListener) query.getQueryResultCollector();
          //                    leftTuple = collector.getLeftTuple();
          //
          context.writeBoolean(true);
          RightTuple rightTuple = (RightTuple) leftTuple.getObject();
          // context.out.println( "rightTuple:" +  rightTuple.getFactHandle() );
          writeFactHandle(
              context,
              stream,
              context.objectMarshallingStrategyStore,
              1,
              rightTuple.getFactHandle());

          for (LeftTuple childLeftTuple = rightTuple.firstChild;
              childLeftTuple != null;
              childLeftTuple = (LeftTuple) childLeftTuple.getRightParentNext()) {
            stream.writeShort(PersisterEnums.LEFT_TUPLE);
            stream.writeInt(childLeftTuple.getLeftTupleSink().getId());
            writeLeftTuple(childLeftTuple, context, recurse);
          }

          //                    for ( LeftTuple childLeftTuple = leftTuple.getFirstChild();
          // childLeftTuple != null; childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()
          // ) {
          //                        stream.writeShort( PersisterEnums.LEFT_TUPLE );
          //                        stream.writeInt( childLeftTuple.getLeftTupleSink().getId() );
          //                        writeFactHandle( context,
          //                                         stream,
          //                                         context.objectMarshallingStrategyStore,
          //                                         1,
          //                                         childLeftTuple.getLastHandle() );
          //                        writeLeftTuple( childLeftTuple,
          //                                        context,
          //                                        recurse );
          //                    }
          //                } else {
          //                    context.writeBoolean( false );
          //                }
          stream.writeShort(PersisterEnums.END);
          // context.out.println( "---- QueryTerminalNode   ---   END" );
          break;
        }
    }
  }
Example #19
  public void modifyLeftTuple(
      LeftTuple leftTuple, PropagationContext context, InternalWorkingMemory workingMemory) {
    final AccumulateMemory memory = (AccumulateMemory) workingMemory.getNodeMemory(this);
    final AccumulateContext accctx = (AccumulateContext) leftTuple.getObject();

    BetaMemory bm = memory.betaMemory;

    // add and remove to make sure we are in the right bucket and at the end;
    // this is needed for indexing and deterministic iteration
    bm.getLeftTupleMemory().removeAdd(leftTuple);

    this.constraints.updateFromTuple(bm.getContext(), workingMemory, leftTuple);
    LeftTuple childLeftTuple = getFirstMatch(leftTuple, accctx, false);

    RightTupleMemory rightMemory = bm.getRightTupleMemory();

    FastIterator rightIt = getRightIterator(rightMemory);

    RightTuple rightTuple = getFirstRightTuple(leftTuple, rightMemory, context, rightIt);

    // first check our index (for indexed nodes only) hasn't changed and we are returning the same
    // bucket
    // if rightTuple is null, we assume there was a bucket change and that bucket is empty
    if (childLeftTuple != null
        && rightMemory.isIndexed()
        && !rightIt.isFullIterator()
        && (rightTuple == null
            || (rightTuple.getMemory() != childLeftTuple.getRightParent().getMemory()))) {
      // our index has changed, so delete all the previous matchings
      removePreviousMatchesForLeftTuple(leftTuple, workingMemory, memory, accctx);

      childLeftTuple = null; // null so the next check will attempt matches for new bucket
    }

    // we can't do anything if RightTupleMemory is empty
    if (rightTuple != null) {
      if (childLeftTuple == null) {
        // either we are indexed and changed buckets, or we had no children before;
        // there is a bucket to potentially match, so try as a normal assert
        for (; rightTuple != null; rightTuple = (RightTuple) rightIt.next(rightTuple)) {
          final InternalFactHandle handle = rightTuple.getFactHandle();
          if (this.constraints.isAllowedCachedLeft(bm.getContext(), handle)) {
            // add a new match
            addMatch(leftTuple, rightTuple, null, null, workingMemory, memory, accctx, true);
          }
        }
      } else {
        boolean isDirty = false;
        // in the same bucket, so iterate and compare
        for (; rightTuple != null; rightTuple = (RightTuple) rightIt.next(rightTuple)) {
          final InternalFactHandle handle = rightTuple.getFactHandle();

          if (this.constraints.isAllowedCachedLeft(bm.getContext(), handle)) {
            if (childLeftTuple == null || childLeftTuple.getRightParent() != rightTuple) {
              // add a new match
              addMatch(
                  leftTuple, rightTuple, childLeftTuple, null, workingMemory, memory, accctx, true);
            } else {
              // we must re-add this to ensure deterministic iteration
              LeftTuple temp = childLeftTuple.getLeftParentNext();
              childLeftTuple.reAddRight();
              childLeftTuple = temp;
            }
          } else if (childLeftTuple != null && childLeftTuple.getRightParent() == rightTuple) {
            LeftTuple temp = childLeftTuple.getLeftParentNext();
            // remove the match
            removeMatch(rightTuple, childLeftTuple, workingMemory, memory, accctx, false);
            childLeftTuple = temp;
            // the next line means that when a match is removed from the current leftTuple
            // and the accumulate does not support the reverse operation, then the whole
            // result is dirty (since removeMatch above is not recalculating the total)
            // and we need to do this later
            isDirty = !accumulate.supportsReverse();
          }
          // else do nothing, was false before and false now.
        }
        if (isDirty) {
          reaccumulateForLeftTuple(leftTuple, workingMemory, memory, accctx);
        }
      }
    }

    this.constraints.resetTuple(memory.betaMemory.getContext());
    if (accctx.getAction() == null) {
      evaluateResultConstraints(
          ActivitySource.LEFT, leftTuple, context, workingMemory, memory, accctx, true);
    } // else evaluation is already scheduled, so do nothing
  }
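
The isDirty branch above captures the accumulate contract: when a match is removed and the accumulate function does not support a reverse operation, the running result can no longer be patched incrementally and must be rebuilt from the remaining matches via reaccumulateForLeftTuple. The illustrative sketch below shows that decision with a sum as the accumulated value; the names are not from the Drools API:

import java.util.List;

// Illustrative sketch: remove one value from an accumulated sum, reversing incrementally when
// supported and re-accumulating from scratch otherwise (the "dirty" path above).
class ReverseOrReaccumulateSketch {
  static int remove(List<Integer> values, int toRemove, int currentSum, boolean supportsReverse) {
    values.remove(Integer.valueOf(toRemove));
    if (supportsReverse) {
      return currentSum - toRemove; // cheap incremental reverse
    }
    int sum = 0;
    for (int v : values) {
      sum += v; // dirty: recompute the whole result from what is left
    }
    return sum;
  }
}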