/**
 * Reads the Data Set for {@code locatorWithFallbacks}, retrying alternative
 * (fallback) locations on I/O failure. Once read, the file attributes are
 * optionally coerced for the remote AE, merged and normalized with the blob
 * attributes attached to the locator, coerced again, and finally stored in
 * {@code datasetWithFMI} together with the original File Meta Information.
 *
 * @return this reader, to allow call chaining
 * @throws IOException if the Data Set cannot be read from any location,
 *         i.e. the last attempted locator has no fallback
 */
public LocatorDatasetReader read() throws IOException {
    selectedLocator = locatorWithFallbacks;
    DatasetWithFMI fromFile = null;
    // Keep trying fallback locations until a read succeeds or none remain.
    while (fromFile == null) {
        try {
            fromFile = readFrom(selectedLocator);
        } catch (IOException e) {
            LOG.info("Failed to read Data Set with iuid={} from {}@{}",
                    selectedLocator.iuid,
                    selectedLocator.getFilePath(),
                    selectedLocator.getStorageSystem(),
                    e);
            selectedLocator = selectedLocator.getFallbackLocator();
            if (selectedLocator == null) {
                // No further locations left - propagate the last failure.
                throw e;
            }
            LOG.info("Try read Data Set from alternative location");
        }
    }
    Attributes attrs = fromFile.getDataset();
    // Coercion only applies when sending to a remote AE.
    if (context.getRemoteAE() != null) {
        storescuService.coerceFileBeforeMerge(selectedLocator, attrs, context);
    }
    // Merge file attributes with the attributes stored in the blob.
    attrs = Utils.mergeAndNormalize(attrs, (Attributes) selectedLocator.getObject());
    if (context.getRemoteAE() != null) {
        storescuService.coerceAttributes(attrs, context);
    }
    datasetWithFMI = new DatasetWithFMI(fromFile.getFileMetaInformation(), attrs);
    return this;
}
@Override protected DataWriter createDataWriter(ArchiveInstanceLocator inst, String tsuid) throws IOException, UnsupportedStoreSCUException { if (inst == null || !(inst instanceof ArchiveInstanceLocator)) throw new UnsupportedStoreSCUException("Unable to send instance"); ArchiveAEExtension arcAEExt = context.getLocalAE().getAEExtension(ArchiveAEExtension.class); Attributes attrs = null; do { try { attrs = readFrom(inst); } catch (IOException e) { LOG.info( "Failed to read Data Set with iuid={} from {}@{}", inst.iuid, inst.getFilePath(), inst.getStorageSystem(), e); inst = inst.getFallbackLocator(); if (inst == null) { throw e; } LOG.info("Try to read Data Set from alternative location"); } } while (attrs == null); // check for suppression criteria if (context.getRemoteAE() != null) { String templateURI = arcAEExt .getRetrieveSuppressionCriteria() .getSuppressionCriteriaMap() .get(context.getRemoteAE().getAETitle()); if (templateURI != null) inst = service.applySuppressionCriteria(inst, attrs, templateURI, context); } service.coerceFileBeforeMerge(inst, attrs, context); // here we merge file attributes with attributes in the blob attrs = Utils.mergeAndNormalize(attrs, (Attributes) inst.getObject()); service.coerceAttributes(attrs, context); if (!tsuid.equals(inst.tsuid)) Decompressor.decompress(attrs, inst.tsuid); return new DataWriterAdapter(attrs); }