/**
 * When L&R Unlinking is enabled, updateSink() is used to populate a node's memory, but it has to
 * take into account if it's propagating.
 */
private void updateLRUnlinking(
    final ObjectSink sink,
    final PropagationContext context,
    final InternalWorkingMemory workingMemory) {

  final ObjectHashSet memory = (ObjectHashSet) workingMemory.getNodeMemory(this);
  Iterator it = memory.iterator();

  InternalFactHandle ctxHandle = (InternalFactHandle) context.getFactHandle();

  if (!context.isPropagating(this)
      || (context.isPropagating(this) && context.shouldPropagateAll())) {

    for (ObjectEntry entry = (ObjectEntry) it.next(); entry != null;
        entry = (ObjectEntry) it.next()) {
      // Assert everything
      sink.assertObject((InternalFactHandle) entry.getValue(), context, workingMemory);
    }

  } else {

    for (ObjectEntry entry = (ObjectEntry) it.next(); entry != null;
        entry = (ObjectEntry) it.next()) {
      InternalFactHandle handle = (InternalFactHandle) entry.getValue();
      // Exclude the current fact propagation
      if (handle.getId() != ctxHandle.getId()) {
        sink.assertObject(handle, context, workingMemory);
      }
    }
  }
}
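The branching above reduces to one filtering decision: re-assert every handle in the node's memory unless this node is the one currently propagating and the context did not ask to propagate everything, in which case the handle that triggered the propagation is skipped so it is not asserted twice. A minimal, self-contained sketch of that decision follows; the class, method, and parameter names are hypothetical, and only the filter mirrors the code above.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

// Hypothetical sketch: decide which fact handles in a node's memory get
// re-asserted into the sink during an L&R-unlinking update.
final class LrUnlinkingSketch {

  static List<Long> handlesToAssert(List<Long> memoryIds,
                                    boolean propagatingHere,
                                    boolean propagateAll,
                                    long propagatingId) {
    return memoryIds.stream()
        // assert everything unless this node is mid-propagation and was not
        // asked to propagate all, in which case skip the triggering handle
        .filter(id -> !propagatingHere || propagateAll || id != propagatingId)
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    List<Long> memory = Arrays.asList(1L, 2L, 3L);
    System.out.println(handlesToAssert(memory, true, false, 2L));  // [1, 3]
    System.out.println(handlesToAssert(memory, true, true, 2L));   // [1, 2, 3]
    System.out.println(handlesToAssert(memory, false, false, 2L)); // [1, 2, 3]
  }
}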
public BaseNode getMatchingNode(BaseNode candidate) {
  if (this.otherSinks != null) {
    for (ObjectSinkNode sink = this.otherSinks.getFirst(); sink != null;
        sink = sink.getNextObjectSinkNode()) {
      if (sink.thisNodeEquals(candidate)) {
        return (BaseNode) sink;
      }
    }
  }

  if (this.hashableSinks != null) {
    for (ObjectSinkNode sink = this.hashableSinks.getFirst(); sink != null;
        sink = sink.getNextObjectSinkNode()) {
      if (sink.thisNodeEquals(candidate)) {
        return (BaseNode) sink;
      }
    }
  }

  if (this.hashedSinkMap != null) {
    final Iterator it = this.hashedSinkMap.newIterator();
    for (ObjectEntry entry = (ObjectEntry) it.next(); entry != null;
        entry = (ObjectEntry) it.next()) {
      final ObjectSink sink = (ObjectSink) entry.getValue();
      if (sink.thisNodeEquals(candidate)) {
        return (BaseNode) sink;
      }
    }
  }

  return null;
}
public void updateSink(
    final ObjectSink sink,
    final PropagationContext context,
    final InternalWorkingMemory workingMemory) {

  // @todo
  // JBRULES-612: the cache MUST be invalidated when a new node type is added to the network,
  // so iterate and reset all caches.
  final ObjectTypeNode node = (ObjectTypeNode) sink;

  final ObjectType newObjectType = node.getObjectType();

  InternalWorkingMemoryEntryPoint wmEntryPoint =
      (InternalWorkingMemoryEntryPoint)
          workingMemory.getWorkingMemoryEntryPoint(this.entryPoint.getEntryPointId());

  for (ObjectTypeConf objectTypeConf :
      wmEntryPoint.getObjectTypeConfigurationRegistry().values()) {
    if (newObjectType.isAssignableFrom(
        objectTypeConf.getConcreteObjectTypeNode().getObjectType())) {
      objectTypeConf.resetCache();

      ObjectTypeNode sourceNode = objectTypeConf.getConcreteObjectTypeNode();
      Iterator it =
          ((ObjectTypeNodeMemory) workingMemory.getNodeMemory(sourceNode)).memory.iterator();
      for (ObjectEntry entry = (ObjectEntry) it.next(); entry != null;
          entry = (ObjectEntry) it.next()) {
        sink.assertObject((InternalFactHandle) entry.getValue(), context, workingMemory);
      }
    }
  }
}
/**
 * This is a hook method for subclasses to override. Please keep it protected unless you know
 * what you are doing.
 */
protected void doPropagateAssertObject(
    InternalFactHandle factHandle,
    PropagationContext context,
    InternalWorkingMemory workingMemory,
    ObjectSink sink) {
  sink.assertObject(factHandle, context, workingMemory);
}
/**
 * Hook method for subclasses to override when propagating a modify. Please keep it protected
 * unless you know what you are doing.
 */
protected void doPropagateModifyObject(
    InternalFactHandle factHandle,
    final ModifyPreviousTuples modifyPreviousTuples,
    PropagationContext context,
    InternalWorkingMemory workingMemory,
    ObjectSink sink) {
  sink.modifyObject(factHandle, modifyPreviousTuples, context, workingMemory);
}
public void updateSink(
    final ObjectSink sink,
    final PropagationContext context,
    final InternalWorkingMemory workingMemory) {
  final ObjectHashSet memory = (ObjectHashSet) workingMemory.getNodeMemory(this);
  Iterator it = memory.iterator();

  // re-assert every fact handle held in this node's memory into the newly attached sink
  for (ObjectEntry entry = (ObjectEntry) it.next(); entry != null;
      entry = (ObjectEntry) it.next()) {
    sink.assertObject((InternalFactHandle) entry.getValue(), context, workingMemory);
  }
}
public void updateSink(
    final ObjectSink sink,
    final PropagationContext context,
    final InternalWorkingMemory workingMemory) {
  final ObjectHashMap memory = (ObjectHashMap) workingMemory.getNodeMemory(this);
  final Iterator it = memory.iterator();

  // iterate over all propagated handles and assert them into the new sink
  for (ObjectEntry entry = (ObjectEntry) it.next(); entry != null;
      entry = (ObjectEntry) it.next()) {
    sink.assertObject((InternalFactHandle) entry.getValue(), context, workingMemory);
  }
}
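Both updateSink variants above follow the same pattern: when a sink is attached to a node that already holds facts, the node replays every handle in its memory into the new sink so the late-attached branch of the network catches up. A small self-contained sketch of that pattern, using made-up Sink and NodeSketch types rather than the real Drools interfaces:

import java.util.LinkedHashSet;
import java.util.Set;

// Hypothetical stand-ins for ObjectSink and a node with memory.
interface Sink {
  void assertObject(String factHandle);
}

final class NodeSketch {
  private final Set<String> memory = new LinkedHashSet<>(); // facts already propagated here

  void assertObject(String handle) {
    memory.add(handle);
  }

  // Replay everything in memory into a sink attached after the facts arrived.
  void updateSink(Sink sink) {
    for (String handle : memory) {
      sink.assertObject(handle);
    }
  }

  public static void main(String[] args) {
    NodeSketch node = new NodeSketch();
    node.assertObject("h1");
    node.assertObject("h2");
    // A sink attached later still receives the earlier facts.
    node.updateSink(handle -> System.out.println("asserted " + handle));
  }
}

This replay is what allows, for example, a rule added to a running session to match facts that were inserted before the rule existed.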
public ObjectSinkPropagator removeObjectSink(final ObjectSink sink) {
  this.sinks = null; // dirty it, so it'll rebuild on next get
  if (sink.getType() == NodeTypeEnums.AlphaNode) {
    final AlphaNode alphaNode = (AlphaNode) sink;
    final AlphaNodeFieldConstraint fieldConstraint = alphaNode.getConstraint();

    if (fieldConstraint instanceof IndexableConstraint) {
      final IndexableConstraint indexableConstraint = (IndexableConstraint) fieldConstraint;
      final FieldValue value = indexableConstraint.getField();

      if (isHashable(indexableConstraint)) {
        final InternalReadAccessor fieldAccessor = indexableConstraint.getFieldExtractor();
        final int index = fieldAccessor.getIndex();
        final FieldIndex fieldIndex = unregisterFieldIndex(index);

        if (fieldIndex.isHashed()) {
          HashKey hashKey = new HashKey(index, value, fieldAccessor);
          this.hashedSinkMap.remove(hashKey);
          if (fieldIndex.getCount() <= this.alphaNodeHashingThreshold - 1) {
            // we now have fewer sinks than the hashing threshold, so unhash
            unHashSinks(fieldIndex);
          }
        } else {
          this.hashableSinks.remove(alphaNode);
        }

        if (this.hashableSinks != null && this.hashableSinks.isEmpty()) {
          this.hashableSinks = null;
        }

        return size() == 1 ? new SingleObjectSinkAdapter(getSinks()[0]) : this;
      }
    }
  }

  this.otherSinks.remove((ObjectSinkNode) sink);

  if (this.otherSinks.isEmpty()) {
    this.otherSinks = null;
  }

  return size() == 1 ? new SingleObjectSinkAdapter(getSinks()[0]) : this;
}
public void updateSink(
    final ObjectSink sink,
    final PropagationContext context,
    final InternalWorkingMemory workingMemory) {

  if (lrUnlinkingEnabled) {
    // Update sink taking into account L&R unlinking peculiarities
    updateLRUnlinking(sink, context, workingMemory);

  } else {
    // Regular updateSink
    final ObjectHashSet memory = (ObjectHashSet) workingMemory.getNodeMemory(this);
    Iterator it = memory.iterator();

    for (ObjectEntry entry = (ObjectEntry) it.next(); entry != null;
        entry = (ObjectEntry) it.next()) {
      sink.assertObject((InternalFactHandle) entry.getValue(), context, workingMemory);
    }
  }
}
public ObjectSinkPropagator addObjectSink(ObjectSink sink, int alphaNodeHashingThreshold) {
  this.sinks = null; // dirty it, so it'll rebuild on next get
  if (sink.getType() == NodeTypeEnums.AlphaNode) {
    final AlphaNode alphaNode = (AlphaNode) sink;
    final InternalReadAccessor readAccessor = getHashableAccessor(alphaNode);

    if (readAccessor != null) {
      final int index = readAccessor.getIndex();
      final FieldIndex fieldIndex = registerFieldIndex(index, readAccessor);

      // DROOLS-678 : prevent null values from being hashed as 0s
      final FieldValue value = ((IndexableConstraint) alphaNode.getConstraint()).getField();
      if (fieldIndex.getCount() >= this.alphaNodeHashingThreshold
          && this.alphaNodeHashingThreshold != 0
          && !value.isNull()) {
        if (!fieldIndex.isHashed()) {
          hashSinks(fieldIndex);
        }

        // no need to check, we know the sink does not exist
        this.hashedSinkMap.put(
            new HashKey(index, value, fieldIndex.getFieldExtractor()), alphaNode, false);
      } else {
        if (this.hashableSinks == null) {
          this.hashableSinks = new ObjectSinkNodeList();
        }
        this.hashableSinks.add(alphaNode);
      }
      return this;
    }
  }

  if (this.otherSinks == null) {
    this.otherSinks = new ObjectSinkNodeList();
  }

  this.otherSinks.add((ObjectSinkNode) sink);
  return this;
}
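The threshold check above is what turns a run of literal constraints on the same field into a hashed dispatch: once fieldIndex.getCount() reaches alphaNodeHashingThreshold, the sinks are keyed in hashedSinkMap by (field index, field value), and propagation becomes a single map lookup instead of evaluating each alpha constraint in turn. A rough, self-contained illustration of that idea follows; the Key and HashedSinkSketch types are invented for the sketch and omit details the real HashKey handles, such as accessor-based hashing and the null-value guard noted in DROOLS-678.

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

// Hypothetical sketch of hashing alpha-node sinks by (field index, literal value).
final class HashedSinkSketch {

  static final class Key {
    final int fieldIndex;
    final Object value;

    Key(int fieldIndex, Object value) {
      this.fieldIndex = fieldIndex;
      this.value = value;
    }

    @Override public boolean equals(Object o) {
      if (!(o instanceof Key)) return false;
      Key k = (Key) o;
      return fieldIndex == k.fieldIndex && Objects.equals(value, k.value);
    }

    @Override public int hashCode() {
      return 31 * fieldIndex + Objects.hashCode(value);
    }
  }

  private final Map<Key, String> hashedSinks = new HashMap<>(); // literal value -> sink id

  void addHashedSink(int fieldIndex, Object literal, String sinkId) {
    hashedSinks.put(new Key(fieldIndex, literal), sinkId);
  }

  // Propagation: one map lookup instead of testing every alpha constraint.
  String dispatch(int fieldIndex, Object factFieldValue) {
    return hashedSinks.get(new Key(fieldIndex, factFieldValue));
  }

  public static void main(String[] args) {
    HashedSinkSketch propagator = new HashedSinkSketch();
    propagator.addHashedSink(0, "red", "alphaRed");
    propagator.addHashedSink(0, "blue", "alphaBlue");
    // A fact whose field 0 equals "blue" reaches exactly one sink via the map.
    System.out.println(propagator.dispatch(0, "blue")); // alphaBlue
  }
}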