/**
 * Precondition: the values of every attribute in 'attributes' MUST be numbers only.
 *
 * @param attributes attributes whose values are collected
 * @param byTime if true, plot values against time since the start of the case; otherwise against
 *     the event number
 * @param timesize divisor used to scale the time axis
 * @return XYSeriesCollection with all values of each attribute against event numbers or timestamps
 */
private XYSeriesCollection getDataAttributes(String[] attributes, boolean byTime, double timesize) {
  XYSeriesCollection result = new XYSeriesCollection();
  for (int index = 0; index < attributes.length; index++) {
    String attribute = attributes[index];
    Integer i = 0; // case number
    XYSeries row = new XYSeries(attribute);
    for (ProcessInstance pi : mylog.getInstances()) {
      Integer x = 0; // event number within the case
      // starting time of the process instance
      Date begin = pi.getAuditTrailEntries().first().getTimestamp();
      for (AuditTrailEntry ate : pi.getListOfATEs()) {
        if (ate.getAttributes().containsKey(attribute)) {
          Double val = Double.valueOf(ate.getAttributes().get(attribute));
          if (byTime) {
            row.add(timediff(begin, ate.getTimestamp()) / timesize, val.doubleValue());
          } else {
            row.add(x.doubleValue(), val.doubleValue());
          }
        }
        x++; // event number in case
      }
      i++; // case number
    }
    result.addSeries(row);
  }
  return result;
}
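/*
 * Usage sketch (not part of the original class): the XYSeriesCollection built above can be handed
 * directly to a standard JFreeChart factory method. The chart title, axis labels, and the
 * createAttributeChart name are illustrative assumptions; the sketch relies on
 * org.jfree.chart.ChartFactory, org.jfree.chart.ChartPanel, org.jfree.chart.JFreeChart and
 * org.jfree.chart.plot.PlotOrientation being imported.
 */
private ChartPanel createAttributeChart(String[] attributes, boolean byTime, double timesize) {
  XYSeriesCollection dataset = getDataAttributes(attributes, byTime, timesize);
  JFreeChart chart =
      ChartFactory.createScatterPlot(
          "Attribute values", // title (assumed)
          byTime ? "time" : "event number", // x-axis label
          "value", // y-axis label
          dataset,
          PlotOrientation.VERTICAL,
          true, // legend
          true, // tooltips
          false); // urls
  return new ChartPanel(chart);
}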
/** @return String[] with all unique attributes on element level */
private String[] getAttributes() {
  HashSet<String> results = new HashSet<String>();
  for (ProcessInstance pi : mylog.getInstances()) {
    for (AuditTrailEntry ate : pi.getListOfATEs()) {
      results.addAll(ate.getAttributes().keySet());
    }
  }
  String[] t = new String[results.size()];
  return results.toArray(t);
}
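/*
 * Usage sketch (illustrative): getAttributes() is the natural source for the attribute names
 * passed to getDataAttributes(...). Note the precondition above: only attributes whose values
 * parse as numbers are safe to pass on, otherwise Double.valueOf will throw. The method name
 * below is an assumption, not original code.
 */
private XYSeriesCollection getDataForAllAttributes(boolean byTime, double timesize) {
  return getDataAttributes(getAttributes(), byTime, timesize);
}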
/**
 * Filters a single process instance.
 *
 * @param instance the process instance to filter
 * @return true if the whole process instance passes the filter, false if the process instance
 *     should be discarded
 */
protected boolean doFiltering(ProcessInstance instance) {
  // creating the artificial end task ate
  AuditTrailEntry ate = new AuditTrailEntryImpl();
  ate.setElement(eventName);
  ate.setType(eventType);
  // ate.setTimestamp(new Date(System.currentTimeMillis() + 100000)); // future
  ate.setOriginator("Artificial (ProM)");
  // adding the new end ate at the last position
  try {
    instance.getAuditTrailEntryList().append(ate);
  } catch (IOException e) {
    Message.add("Fatal error in class " + this.getClass() + ":", Message.ERROR);
    return false;
  }
  return true;
}
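/*
 * Usage sketch (illustrative): in the ProM filter framework doFiltering(...) is normally invoked
 * once per instance by the enclosing LogFilter, but conceptually it amounts to the loop below.
 * The 'mylog' field, the helper name and the failure counter are assumptions, not original code.
 */
private void addArtificialEndEvents() {
  int failed = 0;
  for (ProcessInstance pi : mylog.getInstances()) {
    if (!doFiltering(pi)) {
      failed++;
    }
  }
  if (failed > 0) {
    Message.add(failed + " instance(s) could not be extended with an end event.", Message.ERROR);
  }
}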
/**
 * Builds a histogrammed dataset for the given numeric attributes. Depending on 'no_intervals' the
 * result is a plain XYSeriesCollection or a YIntervalSeriesCollection; the x-axis is either the
 * event number (xbox index 2) or the time since the start of the case (xbox index 3), binned by
 * 'barsize' and scaled by 'timesize'.
 */
private IntervalXYDataset getHistrogrammedDataAttributes(
    String[] attributes, long barsize, long timesize) {
  IntervalXYDataset dataset = null;
  if (no_intervals) {
    dataset = new XYSeriesCollection();
  } else {
    dataset = new YIntervalSeriesCollection();
  }
  for (int index = 0; index < attributes.length; index++) {
    Histogram histogram = new Histogram(barsize);
    String attribute = attributes[index];
    for (ProcessInstance pi : mylog.getInstances()) {
      // starting time of the process instance
      Date begin;
      try {
        begin = pi.getAuditTrailEntryList().get(0).getTimestamp();
      } catch (Exception e) {
        Message.add(e.getMessage(), Message.ERROR);
        return null;
      }
      int j = 0;
      for (AuditTrailEntry ate : pi.getListOfATEs()) {
        if (ate.getAttributes().containsKey(attribute)) {
          Double val = Double.valueOf(ate.getAttributes().get(attribute));
          if (xbox.getSelectedIndex() == 2) {
            histogram.addValue(j, val);
          }
          if (xbox.getSelectedIndex() == 3) {
            histogram.addValue(timediff(begin, ate.getTimestamp()), val);
          }
          j++;
        }
      }
    }
    if (no_intervals) {
      ((XYSeriesCollection) dataset).addSeries(histogram.getXYSeries(attribute, timesize));
    } else {
      ((YIntervalSeriesCollection) dataset)
          .addSeries(histogram.getYIntervalSeries(attribute, timesize));
    }
  }
  return dataset;
}
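/*
 * Sketch of the Histogram helper as it appears to be used above (its source is not shown here):
 * values are grouped into bins of width 'barsize' on the x-axis, and each bin is reported either
 * as a plain mean (getXYSeries) or as mean with lower/upper bounds (getYIntervalSeries). Field
 * names and the min/max choice for the interval bounds are assumptions, not the original
 * implementation.
 */
class Histogram {
  private final long barsize;
  private final java.util.TreeMap<Long, java.util.List<Double>> bins =
      new java.util.TreeMap<Long, java.util.List<Double>>();

  Histogram(long barsize) {
    this.barsize = barsize;
  }

  void addValue(double x, double value) {
    long bin = (long) (x / barsize);
    if (!bins.containsKey(bin)) {
      bins.put(bin, new java.util.ArrayList<Double>());
    }
    bins.get(bin).add(value);
  }

  org.jfree.data.xy.XYSeries getXYSeries(String name, double timesize) {
    org.jfree.data.xy.XYSeries series = new org.jfree.data.xy.XYSeries(name);
    for (java.util.Map.Entry<Long, java.util.List<Double>> e : bins.entrySet()) {
      series.add(e.getKey() * barsize / timesize, mean(e.getValue()));
    }
    return series;
  }

  org.jfree.data.xy.YIntervalSeries getYIntervalSeries(String name, double timesize) {
    org.jfree.data.xy.YIntervalSeries series = new org.jfree.data.xy.YIntervalSeries(name);
    for (java.util.Map.Entry<Long, java.util.List<Double>> e : bins.entrySet()) {
      double m = mean(e.getValue());
      series.add(
          e.getKey() * barsize / timesize,
          m,
          java.util.Collections.min(e.getValue()),
          java.util.Collections.max(e.getValue()));
    }
    return series;
  }

  private double mean(java.util.List<Double> values) {
    double sum = 0;
    for (double v : values) {
      sum += v;
    }
    return sum / values.size();
  }
}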
/**
 * Constructor method.
 *
 * @param pi process instance to be stored
 * @param numberSimilarPIs number of similar process instances that 'pi' represents
 */
public FilterPerWorkflowModelElementAndEvent(ProcessInstance pi, int numberSimilarPIs) {
  if (pi != null) {
    AuditTrailEntryListImpl ates = null;
    try {
      ates = new AuditTrailEntryListImpl();
      for (int i = 0; i < pi.getAuditTrailEntryList().size(); i++) {
        AuditTrailEntry ate = new AuditTrailEntryImpl();
        ate.setElement(pi.getAuditTrailEntryList().get(i).getElement());
        ate.setType(pi.getAuditTrailEntryList().get(i).getType());
        ates.append(ate);
      }
    } catch (IOException ioe) {
      System.err.println(
          "Could not create an AuditTrailEntryListImpl in constructor of the class "
              + "org.processmining.exporting.log.util.FilterPerWorkflowModelElementAndEvent!");
    }
    if (pi.getAttributes().containsKey(IDENTIFIERS)) {
      processInstance = new ProcessInstanceImpl(pi.getProcess(), ates, pi.getModelReferences());
      processInstance.setDataAttributes(pi.getDataAttributes());
      processInstance.setName(pi.getName());
      processInstance.setDescription(pi.getDescription());
    } else {
      processInstance = new ProcessInstanceImpl(pi.getProcess(), ates, pi.getModelReferences());
      processInstance.setDataAttributes(new DataSection());
      processInstance.setName(pi.getName());
      processInstance.setDescription(pi.getDescription());
      addGroupedPiIdentifier(pi.getName());
    }
    setNumberSimilarPIs(Integer.toString(numberSimilarPIs));
  } else {
    throw new NullPointerException("pi equals null!");
  }
}
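/*
 * Usage sketch (illustrative): stripping a whole log down to element and event type information
 * only. The 'mylog' field, the helper name, and the fixed count of 1 similar instance per wrapper
 * are assumptions about the calling code, not part of the original class.
 */
private java.util.List<FilterPerWorkflowModelElementAndEvent> stripInstances() {
  java.util.List<FilterPerWorkflowModelElementAndEvent> stripped =
      new java.util.ArrayList<FilterPerWorkflowModelElementAndEvent>();
  for (ProcessInstance pi : mylog.getInstances()) {
    stripped.add(new FilterPerWorkflowModelElementAndEvent(pi, 1));
  }
  return stripped;
}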
/**
 * Fires an element even if it is not enabled. When the element has duplicates, it looks ahead to
 * decide which duplicate to fire.
 *
 * <p><b>Note:</b> The element MUST be in the net.
 *
 * @param element element to be fired.
 * @param pi process instance where the element to be fired is.
 * @param elementPositionInPi element position.
 * @return int number of tokens that needed to be added to fire this element.
 */
public int fire(int element, ProcessInstance pi, int elementPositionInPi) {
  int addedTokens = 0;
  int elementDuplicates;
  if ((hNet.getReverseDuplicatesMapping()[element]).size() == 1) {
    elementDuplicates = hNet.getReverseDuplicatesMapping()[element].get(0);
  } else {
    // identify which duplicate to fire
    HNSubSet duplicates = hNet.getReverseDuplicatesMapping()[element].deepCopy();
    // getting the duplicates that are enabled
    for (int i = 0; i < duplicates.size(); i++) {
      if (!isEnabled(duplicates.get(i))) {
        duplicates.remove(duplicates.get(i));
      }
    }
    if (duplicates.size() > 0) {
      if (duplicates.size() == 1) {
        elementDuplicates = duplicates.get(0);
      } else {
        // getting the output tasks of the duplicates. These outputs are used to
        // look ahead at the process instance
        HNSubSet unionMappedToATEsCode = getAllOutputElementsOfDuplicates(duplicates);
        AuditTrailEntryList ATEntriesList = pi.getAuditTrailEntryList();
        // advancing the pointer in the ATEntries till the current element + 1
        AuditTrailEntry ATEntry;
        int elementInATE = -1;
        for (int i = elementPositionInPi + 1; i < ATEntriesList.size(); i++) {
          try {
            ATEntry = ATEntriesList.get(i);
            elementInATE =
                this.hNet
                    .getLogEvents()
                    .findLogEventNumber(ATEntry.getElement(), ATEntry.getType());
            if (unionMappedToATEsCode.contains(elementInATE)) {
              break;
            }
          } catch (IOException ex) {
            break;
          } catch (IndexOutOfBoundsException ex) {
            break;
          }
        }
        elementDuplicates = identifyDuplicateToFire(duplicates, elementInATE);
      }
    } else {
      // because no duplicate is enabled, a random one is chosen to fire...
      elementDuplicates =
          (hNet.getReverseDuplicatesMapping()[element])
              .get(generator.nextInt(hNet.getReverseDuplicatesMapping()[element].size()));
    }
  }
  bestCombination = findBestSetTasks(elementDuplicates);
  addedTokens += bestCombination.getNumberMissingTokens();
  removeTokensOutputPlaces(elementDuplicates, bestCombination.getTasks());
  addTokensOutputPlaces(elementDuplicates);
  addToPossiblyEnabledElements(elementDuplicates);
  // registering the firing of the element...
  hNet.increaseElementActualFiring(
      elementDuplicates, MethodsForWorkflowLogDataStructures.getNumberSimilarProcessInstances(pi));
  // updating the arc usage for the individual...
  hNet.increaseArcUsage(
      bestCombination.getElementToFire(),
      bestCombination.getTasks(),
      MethodsForWorkflowLogDataStructures.getNumberSimilarProcessInstances(pi));
  return addedTokens;
}
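/*
 * Replay sketch (illustrative, not original code): firing every audit trail entry of an instance
 * in order and summing the tokens that had to be added artificially. The mapping from an entry to
 * its element number via the log events mirrors the look-ahead code above; treating a negative
 * number as "no matching log event" is an assumption.
 */
public int replay(ProcessInstance pi) {
  int missingTokens = 0;
  int position = 0;
  for (AuditTrailEntry ate : pi.getListOfATEs()) {
    int element = hNet.getLogEvents().findLogEventNumber(ate.getElement(), ate.getType());
    if (element >= 0) {
      missingTokens += fire(element, pi, position);
    }
    position++;
  }
  return missingTokens;
}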