public static void readFactHandles(MarshallerReaderContext context,
                                   org.drools.core.marshalling.impl.ProtobufMessages.EntryPoint _ep,
                                   ObjectStore objectStore,
                                   List<PropagationContextImpl> pctxs) throws IOException, ClassNotFoundException {
    InternalWorkingMemory wm = context.wm;

    SessionEntryPoint entryPoint = context.wm.getEntryPoints().get(_ep.getEntryPointId());
    // load the handles
    for (ProtobufMessages.FactHandle _handle : _ep.getHandleList()) {
        InternalFactHandle handle = readFactHandle(context, entryPoint, _handle);

        context.handles.put(handle.getId(), handle);

        if (!_handle.getIsJustified()) {
            // BeliefSystem handles the Object type
            if (handle.getObject() != null) {
                objectStore.addHandle(handle, handle.getObject());
            }

            // add handle to object type node
            assertHandleIntoOTN(context, wm, handle, pctxs);
        }
    }
}
public boolean evaluate(InternalWorkingMemory workingMemory,
                        final InternalReadAccessor extractor1,
                        final InternalFactHandle handle1,
                        final InternalReadAccessor extractor2,
                        final InternalFactHandle handle2) {
    if (extractor1.isNullValue(workingMemory, handle1.getObject())
            || extractor2.isNullValue(workingMemory, handle2.getObject())) {
        return false;
    }

    long rightTS;
    if (extractor1.isSelfReference()) {
        rightTS = ((EventFactHandle) handle1).getEndTimestamp();
    } else {
        rightTS = extractor1.getLongValue(workingMemory, handle1.getObject());
    }

    long leftTS;
    if (extractor2.isSelfReference()) {
        leftTS = ((EventFactHandle) handle2).getStartTimestamp();
    } else {
        leftTS = extractor2.getLongValue(workingMemory, handle2.getObject());
    }

    return evaluate(rightTS, leftTS);
}
public boolean evaluate(InternalWorkingMemory workingMemory,
                        final InternalReadAccessor extractor1,
                        final InternalFactHandle handle1,
                        final InternalReadAccessor extractor2,
                        final InternalFactHandle handle2) {
    if (extractor1.isNullValue(workingMemory, handle1.getObject())
            || extractor2.isNullValue(workingMemory, handle2.getObject())) {
        return false;
    }

    long rightTS;
    if (extractor1.isSelfReference()) {
        rightTS = ((EventFactHandle) handle1).getEndTimestamp();
    } else {
        rightTS = extractor1.getLongValue(workingMemory, handle1.getObject());
    }

    long leftTS;
    if (extractor2.isSelfReference()) {
        leftTS = ((EventFactHandle) handle2).getStartTimestamp();
    } else {
        leftTS = extractor2.getLongValue(workingMemory, handle2.getObject());
    }

    long dist = leftTS - rightTS;
    return this.getOperator().isNegated() ^ (dist >= this.initRange && dist <= this.finalRange);
}
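The XOR against the operator's negation above is the heart of the temporal range test, shared with the cached variant further down: the signed distance leftTS - rightTS must fall inside [initRange, finalRange], and a negated operator simply inverts that outcome. A minimal standalone sketch of the same check follows; the bounds 5 and 10 and the timestamps are hypothetical values chosen for illustration, whereas Drools derives initRange and finalRange from the operator's parameters.

// Illustrative sketch only - mirrors the range check used by the evaluators above.
public class RangeCheckSketch {
    public static void main(String[] args) {
        long initRange = 5, finalRange = 10;   // hypothetical operator bounds
        boolean negated = false;

        long leftTS = 100, rightTS = 93;       // distance = 7, inside [5, 10]
        long dist = leftTS - rightTS;
        boolean inRange = dist >= initRange && dist <= finalRange;

        // XOR flips the result when the operator is written negated, e.g. "not after[...]"
        System.out.println(negated ^ inRange); // prints: true
    }
}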
public boolean evaluate(InternalWorkingMemory workingMemory,
                        final InternalReadAccessor extractor1,
                        final InternalFactHandle handle1,
                        final InternalReadAccessor extractor2,
                        final InternalFactHandle handle2) {
    final Object value1 = extractor1.getValue(workingMemory, handle1.getObject());
    final Object value2 = extractor2.getValue(workingMemory, handle2.getObject());

    return !soundslike((String) value1, (String) value2);
}
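This evaluator and the cached soundslike variants below return !soundslike(...), i.e. they are the negated form, and they rely on a soundslike(String, String) helper that is not shown in this section. As a rough illustration of what such a phonetic comparison does, here is a sketch using Apache Commons Codec's Soundex; this is an assumption for illustration only, and the actual helper may use a different phonetic algorithm or edge-case handling.

import org.apache.commons.codec.language.Soundex;

// Rough approximation of a soundslike(a, b) check: two strings "sound alike"
// when their Soundex codes match. The real helper's null/locale handling is
// not reproduced here.
public class SoundsLikeSketch {
    static boolean soundsLike(String a, String b) {
        if (a == null || b == null) {
            return false;
        }
        Soundex soundex = new Soundex();
        return soundex.encode(a).equals(soundex.encode(b));
    }

    public static void main(String[] args) {
        System.out.println(soundsLike("Robert", "Rupert")); // true  (both encode to R163)
        System.out.println(soundsLike("Robert", "Alice"));  // false
    }
}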
public void execute(InternalWorkingMemory workingMemory) {
    DroolsQuery query = (DroolsQuery) factHandle.getObject();
    RightTupleList rightTuples = query.getResultInsertRightTupleList();
    query.setResultInsertRightTupleList(null); // null so further operations happen on a new stack element

    for (RightTuple rightTuple = rightTuples.getFirst(); rightTuple != null; ) {
        RightTuple tmp = (RightTuple) rightTuple.getNext();
        rightTuples.remove(rightTuple);
        for (LeftTuple childLeftTuple = rightTuple.firstChild; childLeftTuple != null;
                childLeftTuple = (LeftTuple) childLeftTuple.getRightParentNext()) {
            node.getSinkPropagator().doPropagateAssertLeftTuple(context,
                                                                workingMemory,
                                                                childLeftTuple,
                                                                childLeftTuple.getLeftTupleSink());
        }
        rightTuple = tmp;
    }

    // @FIXME, this should work, but it's closing needed fact handles
    // actually an evaluation 34 appears on the stack twice....
    // if ( !node.isOpenQuery() ) {
    //     workingMemory.getFactHandleFactory().destroyFactHandle( this.factHandle );
    // }
}
private static void assertHandleIntoOTN(MarshallerReaderContext context,
                                        InternalWorkingMemory wm,
                                        InternalFactHandle handle,
                                        List<PropagationContextImpl> pctxs) {
    Object object = handle.getObject();
    InternalWorkingMemoryEntryPoint ep = (InternalWorkingMemoryEntryPoint) handle.getEntryPoint();
    ObjectTypeConf typeConf = ep.getObjectTypeConfigurationRegistry().getObjectTypeConf(ep.getEntryPoint(), object);
    PropagationContextImpl propagationContext = new PropagationContextImpl(wm.getNextPropagationIdCounter(),
                                                                           PropagationContext.INSERTION,
                                                                           null,
                                                                           null,
                                                                           handle,
                                                                           ep.getEntryPoint(),
                                                                           context);
    // keeping this list for a later cleanup is necessary because of the lazy propagations that might occur
    pctxs.add(propagationContext);

    ep.getEntryPointNode().assertObject(handle, propagationContext, typeConf, wm);

    propagationContext.evaluateActionQueue(wm);
    wm.executeQueuedActions();
}
public static void readTruthMaintenanceSystem(MarshallerReaderContext context,
                                              SessionEntryPoint wmep,
                                              ProtobufMessages.EntryPoint _ep,
                                              List<PropagationContextImpl> pctxs) throws IOException, ClassNotFoundException {
    TruthMaintenanceSystem tms = ((NamedEntryPoint) wmep).getTruthMaintenanceSystem();

    ProtobufMessages.TruthMaintenanceSystem _tms = _ep.getTms();
    for (ProtobufMessages.EqualityKey _key : _tms.getKeyList()) {
        InternalFactHandle handle = (InternalFactHandle) context.handles.get(_key.getHandleId());

        // ObjectTypeConf state is not marshalled, so it needs to be re-determined
        ObjectTypeConf typeConf = context.wm.getObjectTypeConfigurationRegistry()
                                            .getObjectTypeConf(((NamedEntryPoint) handle.getEntryPoint()).getEntryPoint(),
                                                               handle.getObject());
        if (!typeConf.isTMSEnabled()) {
            typeConf.enableTMS();
        }

        EqualityKey key = new EqualityKey(handle, _key.getStatus());
        handle.setEqualityKey(key);

        if (key.getStatus() == EqualityKey.JUSTIFIED) {
            // not yet added to the object stores
            ((NamedEntryPoint) handle.getEntryPoint()).getObjectStore().addHandle(handle, handle.getObject());
            // add handle to object type node
            assertHandleIntoOTN(context, context.wm, handle, pctxs);
        }

        for (Integer factHandleId : _key.getOtherHandleList()) {
            handle = (InternalFactHandle) context.handles.get(factHandleId.intValue());
            key.addFactHandle(handle);
            handle.setEqualityKey(key);
        }
        tms.put(key);

        readBeliefSet(context, tms, key, _key.getBeliefSet());
    }
}
public boolean evaluateCachedLeft(InternalWorkingMemory workingMemory,
                                  final VariableContextEntry context,
                                  final InternalFactHandle right) {
    if (context.leftNull || context.extractor.isNullValue(workingMemory, right.getObject())) {
        return false;
    }

    long leftTS = ((LeftStartRightEndContextEntry) context).timestamp;

    long rightTS;
    if (context.getFieldExtractor().isSelfReference()) {
        rightTS = ((EventFactHandle) right).getEndTimestamp();
    } else {
        rightTS = context.getFieldExtractor().getLongValue(workingMemory, right.getObject());
    }

    return evaluate(rightTS, leftTS);
}
public boolean evaluateCachedLeft(InternalWorkingMemory workingMemory,
                                  final VariableContextEntry context,
                                  final InternalFactHandle right) {
    final String value = (String) context.extractor.getValue(workingMemory, right.getObject());

    return !soundslike(value, (String) ((ObjectVariableContextEntry) context).left);
}
public boolean evaluateCachedLeft(InternalWorkingMemory workingMemory,
                                  final VariableContextEntry context,
                                  final InternalFactHandle right) {
    if (context.leftNull || context.extractor.isNullValue(workingMemory, right.getObject())) {
        return false;
    }

    long leftTS = ((LeftStartRightEndContextEntry) context).timestamp;

    long rightTS;
    if (context.getFieldExtractor().isSelfReference()) {
        rightTS = ((EventFactHandle) right).getEndTimestamp();
    } else {
        rightTS = context.getFieldExtractor().getLongValue(workingMemory, right.getObject());
    }

    long dist = leftTS - rightTS;
    return this.getOperator().isNegated() ^ (dist >= this.initRange && dist <= this.finalRange);
}
public boolean evaluateCachedRight(InternalWorkingMemory workingMemory,
                                   final VariableContextEntry context,
                                   final InternalFactHandle left) {
    final String value = (String) ((ObjectVariableContextEntry) context).right;

    return !soundslike(value, (String) context.declaration.getExtractor().getValue(workingMemory, left.getObject()));
}
private static ProtobufMessages.FactHandle.HandleType getHandleType(InternalFactHandle handle) {
    if (handle instanceof EventFactHandle) {
        return ProtobufMessages.FactHandle.HandleType.EVENT;
    } else if (handle instanceof QueryElementFactHandle) {
        return ProtobufMessages.FactHandle.HandleType.QUERY;
    } else if (handle.getObject() instanceof InitialFact) {
        return ProtobufMessages.FactHandle.HandleType.INITIAL_FACT;
    }
    return ProtobufMessages.FactHandle.HandleType.FACT;
}
public boolean evaluate(InternalWorkingMemory workingMemory,
                        final InternalReadAccessor extractor,
                        final InternalFactHandle handle1,
                        final FieldValue object2) {
    final String value1 = (String) extractor.getValue(workingMemory, handle1.getObject());
    final String value2 = (String) object2.getValue();

    return !soundslike(value1, value2);
}
@Override
public Iterator getResults(Tuple leftTuple,
                           InternalWorkingMemory wm,
                           PropagationContext ctx,
                           Object providerContext) {
    InternalFactHandle fh = leftTuple.getFactHandle();
    Object obj = fh.getObject();

    if (obj instanceof DroolsQuery) {
        obj = ((DroolsQuery) obj).getElements()[declaration.getPattern().getOffset()];
    }

    return xpathEvaluator.evaluate(wm, leftTuple, obj).iterator();
}
/**
 * This is the entry point into the network for all asserted Facts. Iterates a cache of matching
 * <code>ObjectTypeNode</code>s asserting the Fact. If the cache does not exist it first iterates
 * and builds the cache.
 *
 * @param factHandle    The FactHandle of the fact to assert
 * @param context       The <code>PropagationContext</code> of the <code>WorkingMemory</code> action
 * @param workingMemory The working memory session.
 */
public void assertObject(final InternalFactHandle factHandle,
                         final PropagationContext context,
                         final InternalWorkingMemory workingMemory) {
    EntryPointId entryPoint = context.getEntryPoint();
    EntryPointNode node = this.entryPoints.get(entryPoint);
    ObjectTypeConf typeConf = ((InternalWorkingMemoryEntryPoint) workingMemory.getWorkingMemoryEntryPoint(entryPoint.getEntryPointId()))
            .getObjectTypeConfigurationRegistry()
            .getObjectTypeConf(entryPoint, factHandle.getObject());
    node.assertObject(factHandle, context, typeConf, workingMemory);
}
public void execute(InternalWorkingMemory workingMemory) {
    DroolsQuery query = (DroolsQuery) factHandle.getObject();
    RightTupleList rightTuples = query.getResultRetractRightTupleList();
    query.setResultRetractRightTupleList(null); // null so further operations happen on a new stack element

    for (RightTuple rightTuple = rightTuples.getFirst(); rightTuple != null; ) {
        RightTuple tmp = (RightTuple) rightTuple.getNext();
        rightTuples.remove(rightTuple);
        this.node.getSinkPropagator().propagateRetractRightTuple(rightTuple, context, workingMemory);
        rightTuple = tmp;
    }
}
public void byPassModifyToBetaNode(final InternalFactHandle factHandle,
                                   final ModifyPreviousTuples modifyPreviousTuples,
                                   final PropagationContext context,
                                   final InternalWorkingMemory workingMemory) {
    final Object object = factHandle.getObject();

    // We need to iterate in the same order as the assert
    if (this.hashedFieldIndexes != null) {
        // Iterate the FieldIndexes to see if any are hashed
        for (FieldIndex fieldIndex = this.hashedFieldIndexes.getFirst(); fieldIndex != null; fieldIndex = fieldIndex.getNext()) {
            if (!fieldIndex.isHashed()) {
                continue;
            }
            // this field is hashed so set the existing hashKey and see if there is a sink for it
            final AlphaNode sink = (AlphaNode) this.hashedSinkMap.get(new HashKey(fieldIndex, object));
            if (sink != null) {
                // only alpha nodes are hashable
                sink.getObjectSinkPropagator().byPassModifyToBetaNode(factHandle, modifyPreviousTuples, context, workingMemory);
            }
        }
    }

    // propagate unhashed
    if (this.hashableSinks != null) {
        for (ObjectSinkNode sink = this.hashableSinks.getFirst(); sink != null; sink = sink.getNextObjectSinkNode()) {
            // only alpha nodes are hashable
            ((AlphaNode) sink).getObjectSinkPropagator().byPassModifyToBetaNode(factHandle, modifyPreviousTuples, context, workingMemory);
        }
    }

    if (this.otherSinks != null) {
        // propagate others
        for (ObjectSinkNode sink = this.otherSinks.getFirst(); sink != null; sink = sink.getNextObjectSinkNode()) {
            // compound alpha, lianode or betanode
            sink.byPassModifyToBetaNode(factHandle, modifyPreviousTuples, context, workingMemory);
        }
    }
}
public boolean evaluate(InternalWorkingMemory workingMemory,
                        final InternalReadAccessor extractor,
                        final InternalFactHandle object1,
                        final FieldValue object2) {
    long rightTS;
    if (extractor.isSelfReference()) {
        rightTS = ((EventFactHandle) object1).getStartTimestamp();
    } else {
        rightTS = extractor.getLongValue(workingMemory, object1.getObject());
    }

    long leftTS = ((Date) object2.getValue()).getTime();

    return evaluate(rightTS, leftTS);
}
public void propagateModifyObject(final InternalFactHandle factHandle,
                                  final ModifyPreviousTuples modifyPreviousTuples,
                                  final PropagationContext context,
                                  final InternalWorkingMemory workingMemory) {
    final Object object = factHandle.getObject();

    // Iterates the FieldIndex collection, which says whether a particular field is hashed or not.
    // If the field is hashed, the hash key is built to look up the correct sink for the current
    // object's slot value; one object may have multiple fields indexed.
    if (this.hashedFieldIndexes != null) {
        // Iterate the FieldIndexes to see if any are hashed
        for (FieldIndex fieldIndex = this.hashedFieldIndexes.getFirst(); fieldIndex != null; fieldIndex = fieldIndex.getNext()) {
            if (!fieldIndex.isHashed()) {
                continue;
            }
            // this field is hashed so set the existing hashKey and see if there is a sink for it
            final AlphaNode sink = (AlphaNode) this.hashedSinkMap.get(new HashKey(fieldIndex, object));
            if (sink != null) {
                // go straight to the AlphaNode's propagator, as we know it's true and no need to retest
                sink.getObjectSinkPropagator().propagateModifyObject(factHandle, modifyPreviousTuples, context, workingMemory);
            }
        }
    }

    // propagate unhashed
    if (this.hashableSinks != null) {
        for (ObjectSinkNode sink = this.hashableSinks.getFirst(); sink != null; sink = sink.getNextObjectSinkNode()) {
            doPropagateModifyObject(factHandle, modifyPreviousTuples, context, workingMemory, sink);
        }
    }

    if (this.otherSinks != null) {
        // propagate others
        for (ObjectSinkNode sink = this.otherSinks.getFirst(); sink != null; sink = sink.getNextObjectSinkNode()) {
            doPropagateModifyObject(factHandle, modifyPreviousTuples, context, workingMemory, sink);
        }
    }
}
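The comment at the top of propagateModifyObject describes the hashing shortcut used by both modify propagators in this section: for a hashed field, a key built from the field index and the fact's current value selects the single matching AlphaNode directly, so its literal constraint never has to be re-tested, while non-hashable sinks are still visited one by one. The following is a stripped-down, hypothetical sketch of that dispatch; SinkLike and the single-field keying are simplifications made up for illustration, not Drools types.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

// Minimal sketch of hash-based sink dispatch over one hashed field.
public class HashedSinkDispatchSketch {
    interface SinkLike { void propagate(Object fact); }

    private final Function<Object, Object> fieldExtractor;              // reads the hashed field from a fact
    private final Map<Object, SinkLike> hashedSinks = new HashMap<>();  // literal value -> alpha sink
    private final List<SinkLike> otherSinks = new ArrayList<>();        // sinks whose constraints cannot be hashed

    HashedSinkDispatchSketch(Function<Object, Object> fieldExtractor) {
        this.fieldExtractor = fieldExtractor;
    }

    void propagateModify(Object fact) {
        // hashed path: one map lookup replaces testing every "field == literal" alpha constraint
        SinkLike hashedHit = hashedSinks.get(fieldExtractor.apply(fact));
        if (hashedHit != null) {
            hashedHit.propagate(fact); // constraint already proven by the hash hit, no re-test needed
        }
        // unhashed path: remaining sinks still have to be visited linearly
        for (SinkLike sink : otherSinks) {
            sink.propagate(fact);
        }
    }
}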
private static ProtobufMessages.FactHandle writeFactHandle(MarshallerWriteContext context,
                                                           ObjectMarshallingStrategyStore objectMarshallingStrategyStore,
                                                           InternalFactHandle handle) throws IOException {
    ProtobufMessages.FactHandle.Builder _handle = ProtobufMessages.FactHandle.newBuilder();

    _handle.setType(getHandleType(handle));
    _handle.setId(handle.getId());
    _handle.setRecency(handle.getRecency());

    if (_handle.getType() == ProtobufMessages.FactHandle.HandleType.EVENT) {
        // is event
        EventFactHandle efh = (EventFactHandle) handle;
        _handle.setTimestamp(efh.getStartTimestamp());
        _handle.setDuration(efh.getDuration());
        _handle.setIsExpired(efh.isExpired());
        _handle.setActivationsCount(efh.getActivationsCount());
    }

    if (handle.getEqualityKey() != null && handle.getEqualityKey().getStatus() == EqualityKey.JUSTIFIED) {
        _handle.setIsJustified(true);
    } else {
        _handle.setIsJustified(false);
    }

    Object object = handle.getObject();

    if (object != null) {
        ObjectMarshallingStrategy strategy = objectMarshallingStrategyStore.getStrategyObject(object);

        Integer index = context.getStrategyIndex(strategy);
        _handle.setStrategyIndex(index.intValue());
        _handle.setObject(ByteString.copyFrom(strategy.marshal(context.strategyContext.get(strategy), context, object)));
    }

    return _handle.build();
}
public void execute(InternalWorkingMemory workingMemory) {
    InternalFactHandle factHandle = (InternalFactHandle) leftTuple.getObject();

    if (node.isOpenQuery()) {
        // iterate to the query terminal node, as the child leftTuples will get picked up there
        workingMemory.getEntryPointNode().retractObject(factHandle,
                                                        context,
                                                        workingMemory.getObjectTypeConfigurationRegistry()
                                                                     .getObjectTypeConf(workingMemory.getEntryPoint(),
                                                                                        factHandle.getObject()),
                                                        workingMemory);
        // workingMemory.getFactHandleFactory().destroyFactHandle( factHandle );
    } else {
        // get child left tuples, as there is no open query
        if (leftTuple.getFirstChild() != null) {
            node.getSinkPropagator().propagateRetractLeftTuple(leftTuple, context, workingMemory);
        }
    }
}
public void assertObject(final InternalFactHandle handle,
                         final PropagationContext context,
                         final ObjectTypeConf objectTypeConf,
                         final InternalWorkingMemory workingMemory) {
    if (log.isTraceEnabled()) {
        log.trace("Insert {}", handle.toString());
    }

    // checks if shadow is enabled
    if (objectTypeConf.isShadowEnabled()) {
        // the user has implemented the ShadowProxy interface, let their implementation
        // know it is safe to update the information the engine can see.
        ((ShadowProxy) handle.getObject()).updateProxy();
    }

    ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes();
    for (int i = 0, length = cachedNodes.length; i < length; i++) {
        cachedNodes[i].assertObject(handle, context, workingMemory);
    }
}
@Test
public void testRetract() {
    final PropagationContext context = pctxFactory.createPropagationContext(0, PropagationContext.INSERTION, null, null, null);
    final StatefulKnowledgeSessionImpl workingMemory =
            new StatefulKnowledgeSessionImpl(1L, (InternalKnowledgeBase) KnowledgeBaseFactory.newKnowledgeBase());

    final ClassFieldReader extractor = store.getReader(Cheese.class, "type");

    final MvelConstraint constraint =
            new MvelConstraintTestUtil("type == \"stilton\"", FieldFactory.getInstance().getFieldValue("stilton"), extractor);

    final List list = new ArrayList();
    final Cheese cheese1 = new Cheese("stilton", 5);
    final Cheese cheese2 = new Cheese("stilton", 15);
    list.add(cheese1);
    list.add(cheese2);
    final MockDataProvider dataProvider = new MockDataProvider(list);

    final Pattern pattern = new Pattern(0, new ClassObjectType(Cheese.class));

    From fromCe = new From(dataProvider);
    fromCe.setResultPattern(pattern);

    final ReteFromNode from = new ReteFromNode(3,
                                               dataProvider,
                                               new MockTupleSource(30),
                                               new AlphaNodeFieldConstraint[]{constraint},
                                               null,
                                               true,
                                               buildContext,
                                               fromCe);
    final MockLeftTupleSink sink = new MockLeftTupleSink(5);
    from.addTupleSink(sink);

    final List asserted = sink.getAsserted();

    final Person person1 = new Person("xxx2", 30);
    final FactHandle person1Handle = workingMemory.insert(person1);

    final LeftTuple tuple = new LeftTupleImpl((DefaultFactHandle) person1Handle, from, true);
    from.assertLeftTuple(tuple, context, workingMemory);

    assertEquals(2, asserted.size());

    final FromMemory memory = (FromMemory) workingMemory.getNodeMemory(from);
    assertEquals(1, memory.getBetaMemory().getLeftTupleMemory().size());
    assertNull(memory.getBetaMemory().getRightTupleMemory());

    RightTuple rightTuple2 = tuple.getFirstChild().getRightParent();
    RightTuple rightTuple1 = tuple.getFirstChild().getHandleNext().getRightParent();
    assertFalse(rightTuple1.equals(rightTuple2));
    assertNull(tuple.getFirstChild().getHandleNext().getHandleNext());

    final InternalFactHandle handle2 = rightTuple2.getFactHandle();
    final InternalFactHandle handle1 = rightTuple1.getFactHandle();
    assertEquals(handle1.getObject(), cheese2);
    assertEquals(handle2.getObject(), cheese1);

    from.retractLeftTuple(tuple, context, workingMemory);
    assertEquals(0, memory.getBetaMemory().getLeftTupleMemory().size());
    assertNull(memory.getBetaMemory().getRightTupleMemory());
}
private static void readBeliefSet(MarshallerReaderContext context,
                                  TruthMaintenanceSystem tms,
                                  EqualityKey key,
                                  ProtobufMessages.BeliefSet _beliefSet) throws IOException, ClassNotFoundException {
    InternalFactHandle handle = (InternalFactHandle) context.handles.get(_beliefSet.getHandleId());

    for (ProtobufMessages.LogicalDependency _logicalDependency : _beliefSet.getLogicalDependencyList()) {
        ProtobufMessages.Activation _activation = _logicalDependency.getActivation();
        Activation activation = (Activation) context.filter
                .getTuplesCache()
                .get(PersisterHelper.createActivationKey(_activation.getPackageName(),
                                                         _activation.getRuleName(),
                                                         _activation.getTuple()))
                .getObject();

        Object object = null;
        ObjectMarshallingStrategy strategy = null;
        if (_logicalDependency.hasObjectStrategyIndex()) {
            strategy = context.usedStrategies.get(_logicalDependency.getObjectStrategyIndex());
            object = strategy.unmarshal(context.strategyContexts.get(strategy),
                                        context,
                                        _logicalDependency.getObject().toByteArray(),
                                        (context.ruleBase == null) ? null : context.ruleBase.getRootClassLoader());
        }

        Object value = null;
        if (_logicalDependency.hasValueStrategyIndex()) {
            strategy = context.usedStrategies.get(_logicalDependency.getValueStrategyIndex());
            value = strategy.unmarshal(context.strategyContexts.get(strategy),
                                       context,
                                       _logicalDependency.getValue().toByteArray(),
                                       (context.ruleBase == null) ? null : context.ruleBase.getRootClassLoader());
        }

        ObjectTypeConf typeConf = context.wm.getObjectTypeConfigurationRegistry()
                                            .getObjectTypeConf(((NamedEntryPoint) handle.getEntryPoint()).getEntryPoint(),
                                                               handle.getObject());

        tms.readLogicalDependency(handle,
                                  object,
                                  value,
                                  activation,
                                  activation.getPropagationContext(),
                                  activation.getRule(),
                                  typeConf);
    }
}
public void modifyObject(final InternalFactHandle handle,
                         final PropagationContext context,
                         final ObjectTypeConf objectTypeConf,
                         final InternalWorkingMemory wm) {
    if (log.isTraceEnabled()) {
        log.trace("Update {}", handle.toString());
    }

    // checks if shadow is enabled
    if (objectTypeConf.isShadowEnabled()) {
        // the user has implemented the ShadowProxy interface, let their implementation
        // know it is safe to update the information the engine can see.
        ((ShadowProxy) handle.getObject()).updateProxy();
    }

    ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes();

    // take a reference to the previous tuples, then null them on the handle
    ModifyPreviousTuples modifyPreviousTuples =
            new ModifyPreviousTuples(handle.getFirstLeftTuple(), handle.getFirstRightTuple(), unlinkingEnabled);
    handle.clearLeftTuples();
    handle.clearRightTuples();

    for (int i = 0, length = cachedNodes.length; i < length; i++) {
        cachedNodes[i].modifyObject(handle, modifyPreviousTuples, context, wm);

        // remove any right tuples that match the current OTN before continuing the modify on the next OTN cache entry
        if (i < cachedNodes.length - 1) {
            RightTuple rightTuple = modifyPreviousTuples.peekRightTuple();
            while (rightTuple != null
                    && ((BetaNode) rightTuple.getRightTupleSink()).getObjectTypeNode() == cachedNodes[i]) {
                modifyPreviousTuples.removeRightTuple();

                if (unlinkingEnabled) {
                    BetaMemory bm = BetaNode.getBetaMemory((BetaNode) rightTuple.getRightTupleSink(), wm);
                    BetaNode.doDeleteRightTuple(rightTuple, wm, bm);
                } else {
                    ((BetaNode) rightTuple.getRightTupleSink()).retractRightTuple(rightTuple, context, wm);
                }
                rightTuple = modifyPreviousTuples.peekRightTuple();
            }

            LeftTuple leftTuple;
            ObjectTypeNode otn;
            while (true) {
                leftTuple = modifyPreviousTuples.peekLeftTuple();
                otn = null;
                if (leftTuple != null) {
                    LeftTupleSink leftTupleSink = leftTuple.getLeftTupleSink();
                    if (leftTupleSink instanceof LeftTupleSource) {
                        otn = ((LeftTupleSource) leftTupleSink).getLeftTupleSource().getObjectTypeNode();
                    } else if (leftTupleSink instanceof RuleTerminalNode) {
                        otn = ((RuleTerminalNode) leftTupleSink).getObjectTypeNode();
                    }
                }

                if (otn == null || otn == cachedNodes[i + 1]) {
                    break;
                }

                modifyPreviousTuples.removeLeftTuple();
                if (unlinkingEnabled) {
                    LeftInputAdapterNode liaNode = (LeftInputAdapterNode) leftTuple.getLeftTupleSink().getLeftTupleSource();
                    LiaNodeMemory lm = (LiaNodeMemory) wm.getNodeMemory(liaNode);
                    LeftInputAdapterNode.doDeleteObject(leftTuple, context, lm.getSegmentMemory(), wm, liaNode, true, lm);
                } else {
                    leftTuple.getLeftTupleSink().retractLeftTuple(leftTuple, context, wm);
                }
            }
        }
    }

    modifyPreviousTuples.retractTuples(context, wm);
}