public static void readLeftTuples(MarshallerReaderContext context) throws IOException,
                                                                   ClassNotFoundException {
    ObjectInputStream stream = context.stream;

    while (stream.readShort() == PersisterEnums.LEFT_TUPLE) {
        int nodeId = stream.readInt();
        LeftTupleSink sink = (LeftTupleSink) context.sinks.get(nodeId);
        int factHandleId = stream.readInt();
        LeftTuple leftTuple = sink.createLeftTuple(context.handles.get(factHandleId),
                                                   sink,
                                                   true);
        readLeftTuple(leftTuple, context);
    }
}
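/**
 * Serializes the propagations hanging off {@code leftTuple}. The record layout depends on the
 * sink's node type: each child propagation is preceded by a header
 * ({@code PersisterEnums.LEFT_TUPLE} or {@code PersisterEnums.RIGHT_TUPLE}) plus the ids needed
 * to recreate it, and the child tuple is then written recursively; node-specific data
 * (blockers, accumulate results and contexts, on-demand fact handles) is written where the node
 * type requires it. Most branches terminate their child-record list with
 * {@code PersisterEnums.END}; terminal nodes instead register the tuple in
 * {@code context.terminalTupleMap}. The matching readLeftTuple must consume records in exactly
 * this order.
 */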
public static void writeLeftTuple(LeftTuple leftTuple,
                                  MarshallerWriteContext context,
                                  boolean recurse) throws IOException {
    ObjectOutputStream stream = context.stream;
    InternalRuleBase ruleBase = context.ruleBase;
    InternalWorkingMemory wm = context.wm;

    LeftTupleSink sink = leftTuple.getLeftTupleSink();

    switch (sink.getType()) {
        case NodeTypeEnums.JoinNode: {
            for (LeftTuple childLeftTuple = leftTuple.getFirstChild(); childLeftTuple != null; childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
                stream.writeShort(PersisterEnums.RIGHT_TUPLE);
                int childSinkId = childLeftTuple.getLeftTupleSink().getId();
                stream.writeInt(childSinkId);
                stream.writeInt(childLeftTuple.getRightParent().getFactHandle().getId());
                writeLeftTuple(childLeftTuple, context, recurse);
            }
            stream.writeShort(PersisterEnums.END);
            break;
        }
        case NodeTypeEnums.QueryRiaFixerNode:
        case NodeTypeEnums.EvalConditionNode: {
            for (LeftTuple childLeftTuple = leftTuple.getFirstChild(); childLeftTuple != null; childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
                stream.writeShort(PersisterEnums.LEFT_TUPLE);
                stream.writeInt(childLeftTuple.getLeftTupleSink().getId());
                writeLeftTuple(childLeftTuple, context, recurse);
            }
            stream.writeShort(PersisterEnums.END);
            break;
        }
        case NodeTypeEnums.NotNode:
        case NodeTypeEnums.ForallNotNode: {
            if (leftTuple.getBlocker() == null) {
                // not blocked, so it has children to propagate
                stream.writeShort(PersisterEnums.LEFT_TUPLE_NOT_BLOCKED);
                for (LeftTuple childLeftTuple = leftTuple.getFirstChild(); childLeftTuple != null; childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
                    stream.writeShort(PersisterEnums.LEFT_TUPLE);
                    stream.writeInt(childLeftTuple.getLeftTupleSink().getId());
                    writeLeftTuple(childLeftTuple, context, recurse);
                }
                stream.writeShort(PersisterEnums.END);
            } else {
                stream.writeShort(PersisterEnums.LEFT_TUPLE_BLOCKED);
                stream.writeInt(leftTuple.getBlocker().getFactHandle().getId());
            }
            break;
        }
        case NodeTypeEnums.ExistsNode: {
            if (leftTuple.getBlocker() == null) {
                // not blocked, so there are no children to propagate
                stream.writeShort(PersisterEnums.LEFT_TUPLE_NOT_BLOCKED);
            } else {
                // blocked, so it has children to propagate
                stream.writeShort(PersisterEnums.LEFT_TUPLE_BLOCKED);
                stream.writeInt(leftTuple.getBlocker().getFactHandle().getId());
                for (LeftTuple childLeftTuple = leftTuple.getFirstChild(); childLeftTuple != null; childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
                    stream.writeShort(PersisterEnums.LEFT_TUPLE);
                    stream.writeInt(childLeftTuple.getLeftTupleSink().getId());
                    writeLeftTuple(childLeftTuple, context, recurse);
                }
                stream.writeShort(PersisterEnums.END);
            }
            break;
        }
        case NodeTypeEnums.AccumulateNode: {
            // accumulate nodes generate new facts on-demand and need special procedures when
            // serializing to persistent storage
            AccumulateMemory memory = (AccumulateMemory) context.wm.getNodeMemory((BetaNode) sink);
            AccumulateContext accctx = (AccumulateContext) leftTuple.getObject();
            // first we serialize the generated fact handle
            writeFactHandle(context,
                            stream,
                            context.objectMarshallingStrategyStore,
                            accctx.result.getFactHandle());
            // then the associated accumulation context
            stream.writeObject(accctx.context);
            // then the boolean propagated flag
            stream.writeBoolean(accctx.propagated);
            // and finally all the propagated tuples
            for (LeftTuple childLeftTuple = leftTuple.getFirstChild(); childLeftTuple != null; childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
                if (leftTuple.getLeftTupleSink().getId() == childLeftTuple.getLeftTupleSink().getId()) {
                    // this is a match record, so associate the right tuple
                    stream.writeShort(PersisterEnums.RIGHT_TUPLE);
                    stream.writeInt(childLeftTuple.getRightParent().getFactHandle().getId());
                } else {
                    // this is a propagation record
                    stream.writeShort(PersisterEnums.LEFT_TUPLE);
                    int sinkId = childLeftTuple.getLeftTupleSink().getId();
                    stream.writeInt(sinkId);
                    writeLeftTuple(childLeftTuple, context, recurse);
                }
            }
            stream.writeShort(PersisterEnums.END);
            break;
        }
        case NodeTypeEnums.RightInputAdaterNode: {
            // RIANs generate new fact handles on-demand to wrap tuples and need special
            // procedures when serializing to persistent storage
            ObjectHashMap memory = (ObjectHashMap) context.wm.getNodeMemory((NodeMemory) sink);
            InternalFactHandle ifh = (InternalFactHandle) memory.get(leftTuple);
            // first we serialize the generated fact handle id and recency
            stream.writeInt(ifh.getId());
            stream.writeLong(ifh.getRecency());

            writeRightTuples(ifh, context);

            stream.writeShort(PersisterEnums.END);
            break;
        }
        case NodeTypeEnums.FromNode: {
            // FromNodes generate new fact handles on-demand to wrap objects and need special
            // procedures when serializing to persistent storage
            FromMemory memory = (FromMemory) context.wm.getNodeMemory((NodeMemory) sink);
            Map<Object, RightTuple> matches = (Map<Object, RightTuple>) leftTuple.getObject();
            for (RightTuple rightTuple : matches.values()) {
                // first we serialize each generated fact handle and its right tuples
                stream.writeShort(PersisterEnums.FACT_HANDLE);
                writeFactHandle(context,
                                stream,
                                context.objectMarshallingStrategyStore,
                                rightTuple.getFactHandle());
                writeRightTuples(rightTuple.getFactHandle(), context);
            }
            stream.writeShort(PersisterEnums.END);

            for (LeftTuple childLeftTuple = leftTuple.getFirstChild(); childLeftTuple != null; childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
                stream.writeShort(PersisterEnums.RIGHT_TUPLE);
                stream.writeInt(childLeftTuple.getLeftTupleSink().getId());
                stream.writeInt(childLeftTuple.getRightParent().getFactHandle().getId());
                writeLeftTuple(childLeftTuple, context, recurse);
            }
            stream.writeShort(PersisterEnums.END);
            break;
        }
        case NodeTypeEnums.UnificationNode: {
            QueryElementNode node = (QueryElementNode) sink;
            boolean isOpen = node.isOpenQuery();
            context.writeBoolean(isOpen);
            if (isOpen) {
                InternalFactHandle factHandle = (InternalFactHandle) leftTuple.getObject();
                DroolsQuery query = (DroolsQuery) factHandle.getObject();
                // temporarily null the object so the DroolsQuery itself is not marshalled with the handle
                factHandle.setObject(null);
                writeFactHandle(context,
                                stream,
                                context.objectMarshallingStrategyStore,
                                0,
                                factHandle);
                factHandle.setObject(query);
                writeLeftTuples(context, new InternalFactHandle[]{factHandle});
            } else {
                for (LeftTuple childLeftTuple = leftTuple.getFirstChild(); childLeftTuple != null; childLeftTuple = (LeftTuple) childLeftTuple.getLeftParentNext()) {
                    stream.writeShort(PersisterEnums.LEFT_TUPLE);
                    stream.writeInt(childLeftTuple.getLeftTupleSink().getId());
                    InternalFactHandle factHandle = childLeftTuple.getLastHandle();
                    writeFactHandle(context,
                                    stream,
                                    context.objectMarshallingStrategyStore,
                                    1,
                                    factHandle);
                    writeLeftTuple(childLeftTuple, context, recurse);
                }
                stream.writeShort(PersisterEnums.END);
            }
            break;
        }
        case NodeTypeEnums.RuleTerminalNode: {
            int pos = context.terminalTupleMap.size();
            context.terminalTupleMap.put(leftTuple, pos);
            break;
        }
        case NodeTypeEnums.QueryTerminalNode: {
            // NOTE: the commented-out block below is the earlier protocol, which walked up to the
            // DroolsQuery and wrote an explicit boolean flag; the matching readLeftTuple branch
            // still reads such a flag before these records.
            //
            // LeftTuple entry = leftTuple;
            // // find the DroolsQuery object
            // while ( entry.getParent() != null ) {
            //     entry = entry.getParent();
            // }
            // DroolsQuery query = (DroolsQuery) entry.getLastHandle().getObject();
            // if ( query.getQueryResultCollector() instanceof UnificationNodeViewChangedEventListener ) {
            //     context.writeBoolean( true );
            //     UnificationNodeViewChangedEventListener collector =
            //             (UnificationNodeViewChangedEventListener) query.getQueryResultCollector();
            //     leftTuple = collector.getLeftTuple();
            //     ...
            // } else {
            //     context.writeBoolean( false );
            // }
            RightTuple rightTuple = (RightTuple) leftTuple.getObject();
            writeFactHandle(context,
                            stream,
                            context.objectMarshallingStrategyStore,
                            1,
                            rightTuple.getFactHandle());
            for (LeftTuple childLeftTuple = rightTuple.firstChild; childLeftTuple != null; childLeftTuple = (LeftTuple) childLeftTuple.getRightParentNext()) {
                stream.writeShort(PersisterEnums.LEFT_TUPLE);
                stream.writeInt(childLeftTuple.getLeftTupleSink().getId());
                writeLeftTuple(childLeftTuple, context, recurse);
            }
            stream.writeShort(PersisterEnums.END);
            break;
        }
    }
}
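/**
 * Rebuilds the child tuples of {@code parentLeftTuple} from the stream, mirroring the record
 * layout produced by writeLeftTuple: it dispatches on the sink's node type, reads child record
 * headers until {@code PersisterEnums.END} (or until a non-matching header for the loop-based
 * branches), recreates each child LeftTuple against the right tuples and fact handles already
 * restored into the context, and recurses into the children.
 */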
public static void readLeftTuple(LeftTuple parentLeftTuple,
                                 MarshallerReaderContext context) throws IOException,
                                                                 ClassNotFoundException {
    ObjectInputStream stream = context.stream;
    Map<Integer, BaseNode> sinks = context.sinks;

    LeftTupleSink sink = parentLeftTuple.getLeftTupleSink();

    switch (sink.getType()) {
        case NodeTypeEnums.JoinNode: {
            BetaMemory memory = (BetaMemory) context.wm.getNodeMemory((BetaNode) sink);
            addToLeftMemory(parentLeftTuple, memory);

            while (stream.readShort() == PersisterEnums.RIGHT_TUPLE) {
                int childSinkId = stream.readInt();
                LeftTupleSink childSink = (LeftTupleSink) sinks.get(childSinkId);
                int factHandleId = stream.readInt();
                RightTupleKey key = new RightTupleKey(factHandleId, sink);
                RightTuple rightTuple = context.rightTuples.get(key);
                LeftTuple childLeftTuple = childSink.createLeftTuple(parentLeftTuple,
                                                                     rightTuple,
                                                                     null,
                                                                     null,
                                                                     childSink,
                                                                     true);
                readLeftTuple(childLeftTuple, context);
            }
            break;
        }
        case NodeTypeEnums.EvalConditionNode: {
            while (stream.readShort() == PersisterEnums.LEFT_TUPLE) {
                LeftTupleSink childSink = (LeftTupleSink) sinks.get(stream.readInt());
                LeftTuple childLeftTuple = childSink.createLeftTuple(parentLeftTuple,
                                                                     childSink,
                                                                     true);
                readLeftTuple(childLeftTuple, context);
            }
            break;
        }
        case NodeTypeEnums.NotNode:
        case NodeTypeEnums.ForallNotNode: {
            BetaMemory memory = (BetaMemory) context.wm.getNodeMemory((BetaNode) sink);
            int type = stream.readShort();
            if (type == PersisterEnums.LEFT_TUPLE_NOT_BLOCKED) {
                addToLeftMemory(parentLeftTuple, memory);

                while (stream.readShort() == PersisterEnums.LEFT_TUPLE) {
                    LeftTupleSink childSink = (LeftTupleSink) sinks.get(stream.readInt());
                    LeftTuple childLeftTuple = childSink.createLeftTuple(parentLeftTuple,
                                                                         childSink,
                                                                         true);
                    readLeftTuple(childLeftTuple, context);
                }
            } else {
                int factHandleId = stream.readInt();
                RightTupleKey key = new RightTupleKey(factHandleId, sink);
                RightTuple rightTuple = context.rightTuples.get(key);

                parentLeftTuple.setBlocker(rightTuple);
                rightTuple.addBlocked(parentLeftTuple);
            }
            break;
        }
        case NodeTypeEnums.ExistsNode: {
            BetaMemory memory = (BetaMemory) context.wm.getNodeMemory((BetaNode) sink);
            int type = stream.readShort();
            if (type == PersisterEnums.LEFT_TUPLE_NOT_BLOCKED) {
                addToLeftMemory(parentLeftTuple, memory);
            } else {
                int factHandleId = stream.readInt();
                RightTupleKey key = new RightTupleKey(factHandleId, sink);
                RightTuple rightTuple = context.rightTuples.get(key);

                parentLeftTuple.setBlocker(rightTuple);
                rightTuple.addBlocked(parentLeftTuple);

                while (stream.readShort() == PersisterEnums.LEFT_TUPLE) {
                    LeftTupleSink childSink = (LeftTupleSink) sinks.get(stream.readInt());
                    LeftTuple childLeftTuple = childSink.createLeftTuple(parentLeftTuple,
                                                                         childSink,
                                                                         true);
                    readLeftTuple(childLeftTuple, context);
                }
            }
            break;
        }
        case NodeTypeEnums.AccumulateNode: {
            // accumulate nodes generate new facts on-demand and need special procedures when
            // de-serializing from persistent storage
            AccumulateMemory memory = (AccumulateMemory) context.wm.getNodeMemory((BetaNode) sink);
            memory.betaMemory.getLeftTupleMemory().add(parentLeftTuple);

            AccumulateContext accctx = new AccumulateContext();
            parentLeftTuple.setObject(accctx);
            // first we de-serialize the generated fact handle
            InternalFactHandle handle = readFactHandle(context);
            accctx.result = new RightTuple(handle, (RightTupleSink) sink);
            // then the associated accumulation context
            accctx.context = (Serializable[]) stream.readObject();
            // then the boolean propagated flag
            accctx.propagated = stream.readBoolean();
            // and finally all the propagated tuples
            short head = -1;
            while ((head = stream.readShort()) != PersisterEnums.END) {
                switch (head) {
                    case PersisterEnums.RIGHT_TUPLE: {
                        int factHandleId = stream.readInt();
                        RightTupleKey key = new RightTupleKey(factHandleId, sink);
                        RightTuple rightTuple = context.rightTuples.get(key);
                        // just wiring up the match record
                        sink.createLeftTuple(parentLeftTuple,
                                             rightTuple,
                                             null,
                                             null,
                                             sink,
                                             true);
                        break;
                    }
                    case PersisterEnums.LEFT_TUPLE: {
                        int sinkId = stream.readInt();
                        LeftTupleSink childSink = (LeftTupleSink) sinks.get(sinkId);
                        LeftTuple childLeftTuple = new LeftTupleImpl(parentLeftTuple,
                                                                     accctx.result,
                                                                     childSink,
                                                                     true);
                        readLeftTuple(childLeftTuple, context);
                        break;
                    }
                    default: {
                        throw new RuntimeDroolsException(
                                "Marshalling error. This is a bug. Please contact the development team.");
                    }
                }
            }
            break;
        }
        case NodeTypeEnums.RightInputAdaterNode: {
            // RIANs generate new fact handles on-demand to wrap tuples and need special
            // procedures when de-serializing from persistent storage
            ObjectHashMap memory = (ObjectHashMap) context.wm.getNodeMemory((NodeMemory) sink);
            // recreate the fact handle that wraps this tuple
            int id = stream.readInt();
            long recency = stream.readLong();
            InternalFactHandle handle = new DefaultFactHandle(
                    id,
                    parentLeftTuple,
                    recency,
                    context.wm.getEntryPoints().get(EntryPoint.DEFAULT.getEntryPointId()));
            memory.put(parentLeftTuple, handle);

            readRightTuples(handle, context);

            stream.readShort(); // PersisterEnums.END
            break;
        }
        case NodeTypeEnums.FromNode: {
            // FromNodes generate new fact handles on-demand to wrap objects and need special
            // procedures when de-serializing from persistent storage
            FromMemory memory = (FromMemory) context.wm.getNodeMemory((NodeMemory) sink);
            memory.betaMemory.getLeftTupleMemory().add(parentLeftTuple);
            Map<Object, RightTuple> matches = new LinkedHashMap<Object, RightTuple>();
            parentLeftTuple.setObject(matches);

            while (stream.readShort() == PersisterEnums.FACT_HANDLE) {
                // we de-serialize each generated fact handle and its right tuples
                InternalFactHandle handle = readFactHandle(context);
                context.handles.put(handle.getId(), handle);
                readRightTuples(handle, context);
                matches.put(handle.getObject(), handle.getFirstRightTuple());
            }

            while (stream.readShort() == PersisterEnums.RIGHT_TUPLE) {
                LeftTupleSink childSink = (LeftTupleSink) sinks.get(stream.readInt());
                int factHandleId = stream.readInt();
                // tuples created in a FromNode always use a null sink in their key
                RightTupleKey key = new RightTupleKey(factHandleId, null);
                RightTuple rightTuple = context.rightTuples.get(key);
                LeftTuple childLeftTuple = new LeftTupleImpl(parentLeftTuple,
                                                             rightTuple,
                                                             childSink,
                                                             true);
                readLeftTuple(childLeftTuple, context);
            }
            break;
        }
        case NodeTypeEnums.UnificationNode: {
            boolean isOpen = context.readBoolean();
            if (isOpen) {
                QueryElementNode node = (QueryElementNode) sink;
                InternalFactHandle handle = readFactHandle(context);
                context.handles.put(handle.getId(), handle);
                node.createDroolsQuery(parentLeftTuple, handle, context.wm);
                readLeftTuples(context);
            } else {
                while (stream.readShort() == PersisterEnums.LEFT_TUPLE) {
                    LeftTupleSink childSink = (LeftTupleSink) sinks.get(stream.readInt());
                    // we de-serialize the generated fact handle
                    InternalFactHandle handle = readFactHandle(context);
                    context.handles.put(handle.getId(), handle);
                    RightTuple rightTuple = new RightTuple(handle);
                    // @TODO check if open query
                    LeftTuple childLeftTuple = new LeftTupleImpl(parentLeftTuple,
                                                                 rightTuple,
                                                                 childSink,
                                                                 true);
                    readLeftTuple(childLeftTuple, context);
                }
            }
            break;
        }
        case NodeTypeEnums.RuleTerminalNode: {
            int pos = context.terminalTupleMap.size();
            context.terminalTupleMap.put(pos, parentLeftTuple);
            break;
        }
        case NodeTypeEnums.QueryTerminalNode: {
            boolean unificationNode = context.readBoolean();
            if (unificationNode) {
                // we de-serialize the generated fact handle
                InternalFactHandle handle = readFactHandle(context);
                context.handles.put(handle.getId(), handle);
                RightTuple rightTuple = new RightTuple(handle);
                parentLeftTuple.setObject(rightTuple);

                // find the DroolsQuery object at the root of the tuple chain
                LeftTuple entry = parentLeftTuple;
                while (entry.getParent() != null) {
                    entry = entry.getParent();
                }
                DroolsQuery query = (DroolsQuery) entry.getLastHandle().getObject();
                LeftTuple leftTuple = ((UnificationNodeViewChangedEventListener) query.getQueryResultCollector()).getLeftTuple();

                while (stream.readShort() == PersisterEnums.LEFT_TUPLE) {
                    LeftTupleSink childSink = (LeftTupleSink) sinks.get(stream.readInt());
                    // @TODO check if open query!!!
                    LeftTuple childLeftTuple = childSink.createLeftTuple(leftTuple,
                                                                         rightTuple,
                                                                         childSink);
                    readLeftTuple(childLeftTuple, context);
                }
            }
            break;
        }
    }
}
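/**
 * Restores the full state of a stateful session from {@code context.stream}: advances the
 * pseudo clock when one is in use, rebuilds the initial-fact propagation (creating the
 * InitialFact ObjectTypeNode on demand if it is missing), reads the fact handles of every entry
 * point, the left tuples, propagation contexts, activations, the action queue and the truth
 * maintenance system, then delegates process instances, work items and process timers to the
 * process marshaller (or expects {@code PersisterEnums.END} markers when none is registered)
 * before reading the session's own default timers.
 */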
public static ReteooStatefulSession readSession(ReteooStatefulSession session,
                                                DefaultAgenda agenda,
                                                long time,
                                                boolean multithread,
                                                MarshallerReaderContext context) throws IOException,
                                                                                ClassNotFoundException {
    if (session.getTimerService() instanceof PseudoClockScheduler) {
        PseudoClockScheduler clock = (PseudoClockScheduler) session.getTimerService();
        clock.advanceTime(time, TimeUnit.MILLISECONDS);
    }

    // RuleFlowGroups need to reference the session
    for (RuleFlowGroup group : agenda.getRuleFlowGroupsMap().values()) {
        ((RuleFlowGroupImpl) group).setWorkingMemory(session);
    }

    context.wm = session;

    context.handles.put(context.wm.getInitialFactHandle().getId(),
                        context.wm.getInitialFactHandle());

    if (context.stream.readBoolean()) {
        InternalFactHandle initialFactHandle = context.wm.getInitialFactHandle();
        int sinkId = context.stream.readInt();
        ObjectTypeNode initialFactNode = (ObjectTypeNode) context.sinks.get(sinkId);
        if (initialFactNode == null) {
            // ------ START RANT ------
            // The following code is as bad as it looks, but since I was so far
            // unable to convince Mark that creating OTNs on demand is really bad,
            // I have to continue doing it :)
            EntryPointNode defaultEPNode = context.ruleBase.getRete().getEntryPointNode(EntryPoint.DEFAULT);
            BuildContext buildContext = new BuildContext(context.ruleBase,
                                                         context.ruleBase.getReteooBuilder().getIdGenerator());
            buildContext.setPartitionId(RuleBasePartitionId.MAIN_PARTITION);
            buildContext.setObjectTypeNodeMemoryEnabled(true);
            initialFactNode = new ObjectTypeNode(sinkId,
                                                 defaultEPNode,
                                                 ClassObjectType.InitialFact_ObjectType,
                                                 buildContext);
            // isn't contention something everybody loves?
            context.ruleBase.lock();
            try {
                // Yeah, I know: because one session is being deserialized, we go and lock all of them...
                initialFactNode.attach(buildContext);
            } finally {
                context.ruleBase.unlock();
            }
            // ------- END RANT -----
        }
        ObjectHashSet initialFactMemory = (ObjectHashSet) context.wm.getNodeMemory(initialFactNode);

        initialFactMemory.add(initialFactHandle);
        readRightTuples(initialFactHandle, context);
    }

    while (context.readShort() == PersisterEnums.ENTRY_POINT) {
        String entryPointId = context.stream.readUTF();
        WorkingMemoryEntryPoint wmep = context.wm.getEntryPoints().get(entryPointId);
        readFactHandles(context, ((NamedEntryPoint) wmep).getObjectStore());
    }

    InternalFactHandle handle = context.wm.getInitialFactHandle();
    while (context.stream.readShort() == PersisterEnums.LEFT_TUPLE) {
        LeftTupleSink sink = (LeftTupleSink) context.sinks.get(context.stream.readInt());
        LeftTuple leftTuple = sink.createLeftTuple(handle,
                                                   sink,
                                                   true);
        readLeftTuple(leftTuple, context);
    }

    readPropagationContexts(context);
    readActivations(context);
    readActionQueue(context);
    readTruthMaintenanceSystem(context);

    if (processMarshaller != null) {
        processMarshaller.readProcessInstances(context);
    } else {
        short type = context.stream.readShort();
        if (PersisterEnums.END != type) {
            throw new IllegalStateException("No process marshaller, unable to unmarshall type: " + type);
        }
    }

    if (processMarshaller != null) {
        processMarshaller.readWorkItems(context);
    } else {
        short type = context.stream.readShort();
        if (PersisterEnums.END != type) {
            throw new IllegalStateException("No process marshaller, unable to unmarshall type: " + type);
        }
    }

    if (processMarshaller != null) {
        // this actually reads ALL timers, for backwards compatibility reasons:
        // it accepts old jBPM binaries, but always writes the new binary format
        processMarshaller.readProcessTimers(context);
    } else {
        short type = context.stream.readShort();
        if (PersisterEnums.END != type) {
            throw new IllegalStateException("No process marshaller, unable to unmarshall type: " + type);
        }
    }

    // no legacy jBPM timers, so handle locally
    while (context.readShort() == PersisterEnums.DEFAULT_TIMER) {
        InputMarshaller.readTimer(context);
    }

    if (multithread) {
        session.startPartitionManagers();
    }

    return session;
}
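// Illustrative only: a minimal sketch of how these low-level readers and writers are usually
// reached, assuming the public org.drools.marshalling API of this code base
// (MarshallerFactory / Marshaller); the exact entry point and stream handling may differ.
//
//     Marshaller marshaller = MarshallerFactory.newMarshaller(kbase);
//     marshaller.marshall(outputStream, session);                        // drives writeLeftTuple & co.
//     StatefulKnowledgeSession restored = marshaller.unmarshall(inputStream);  // drives readSession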