/**
 * Merges the data into a given consumer.
 *
 * <p>For each data key, the data sets are scanned from highest to lowest priority to decide
 * what to write, update, or remove on the consumer. Keys for which {@code requiresMerge}
 * returns true are instead merged across all sets via {@code mergeItems}.
 *
 * @param consumer the consumer of the merge.
 * @param doCleanUp clean up the state to be able to do further incremental merges. If this is a
 *     one-shot merge, this can be false to improve performance.
 * @throws MergingException such as a DuplicateDataException or a MergeConsumer.ConsumerException
 *     if something goes wrong
 */
public void mergeData(@NonNull MergeConsumer<I> consumer, boolean doCleanUp)
        throws MergingException {

    consumer.start(mFactory);

    try {
        // get all the items keys.
        Set<String> dataItemKeys = Sets.newHashSet();
        for (S dataSet : mDataSets) {
            // quick check on duplicates in the resource set.
            dataSet.checkItems();
            ListMultimap<String, I> map = dataSet.getDataMap();
            dataItemKeys.addAll(map.keySet());
        }

        // loop on all the data items.
        for (String dataItemKey : dataItemKeys) {
            if (requiresMerge(dataItemKey)) {
                // This key is merged across sets rather than overlaid: gather all the
                // available items, from the lower priority to the higher priority,
                // and delegate to mergeItems.
                List<I> items = Lists.newArrayListWithExpectedSize(mDataSets.size());
                for (S dataSet : mDataSets) {
                    // look for the resource key in the set
                    ListMultimap<String, I> itemMap = dataSet.getDataMap();
                    List<I> setItems = itemMap.get(dataItemKey);
                    items.addAll(setItems);
                }
                mergeItems(dataItemKey, items, consumer);
                continue;
            }

            // for each items, look in the data sets, starting from the end of the list.
            // (the end of the list is the highest priority overlay)
            I previouslyWritten = null;
            I toWrite = null;

            /*
             * We are looking for what to write/delete: the last non deleted item, and the
             * previously written one.
             */

            boolean foundIgnoredItem = false;

            setLoop:
            for (int i = mDataSets.size() - 1; i >= 0; i--) {
                S dataSet = mDataSets.get(i);

                // look for the resource key in the set
                ListMultimap<String, I> itemMap = dataSet.getDataMap();

                List<I> items = itemMap.get(dataItemKey);
                if (items.isEmpty()) {
                    continue;
                }

                // The list can contain at max 2 items. One touched and one deleted.
                // More than one deleted means there was more than one which isn't possible
                // More than one touched means there is more than one and this isn't possible.
                // Scan from the back so the most recent state is seen first.
                for (int ii = items.size() - 1; ii >= 0; ii--) {
                    I item = items.get(ii);

                    if (consumer.ignoreItemInMerge(item)) {
                        // item is not meant to be written by this consumer
                        // (e.g. attr inside declare-styleable); remember we saw one so the
                        // sanity assert below still holds.
                        foundIgnoredItem = true;
                        continue;
                    }

                    if (item.isWritten()) {
                        // only one item can have been written for a given key.
                        assert previouslyWritten == null;
                        previouslyWritten = item;
                    }

                    if (toWrite == null && !item.isRemoved()) {
                        // first non-removed item found (highest priority) wins.
                        toWrite = item;
                    }

                    if (toWrite != null && previouslyWritten != null) {
                        // both answers found; no need to look at lower-priority sets.
                        break setLoop;
                    }
                }
            }

            // done searching, we should at least have something, unless we only
            // found items that are not meant to be written (attr inside declare styleable)
            assert foundIgnoredItem || previouslyWritten != null || toWrite != null;

            //noinspection ConstantConditions
            if (previouslyWritten == null && toWrite == null) {
                continue;
            }

            // now need to handle, the type of each (single res file, multi res file), whether
            // they are the same object or not, whether the previously written object was deleted.

            if (toWrite == null) {
                // nothing to write? delete only then.
                assert previouslyWritten.isRemoved();
                consumer.removeItem(previouslyWritten, null /*replacedBy*/);
            } else if (previouslyWritten == null || previouslyWritten == toWrite) {
                // easy one: new or updated res
                consumer.addItem(toWrite);
            } else {
                // replacement of a resource by another.

                // force write the new value
                toWrite.setTouched();
                consumer.addItem(toWrite);
                // and remove the old one
                consumer.removeItem(previouslyWritten, toWrite);
            }
        }
    } finally {
        consumer.end();
    }

    if (doCleanUp) {
        // reset all states. We can't just reset the toWrite and previouslyWritten objects
        // since overlayed items might have been touched as well.
        // Should also clean (remove) objects that are removed.
        postMergeCleanUp();
    }
}
/**
 * Validates every data set by checking it for duplicate items.
 *
 * @throws DuplicateDataException if any data set contains conflicting items
 */
@VisibleForTesting
void validateDataSets() throws DuplicateDataException {
    // Walk the sets by index and let each one verify its own items.
    for (int i = 0; i < mDataSets.size(); i++) {
        mDataSets.get(i).checkItems();
    }
}