public LocatorDatasetReader read() throws IOException {
    selectedLocator = locatorWithFallbacks;
    DatasetWithFMI originalDatasetWithFMI = null;
    // Try the primary locator first; on I/O failure, walk the fallback chain
    // until a read succeeds or no alternative location remains.
    do {
        try {
            originalDatasetWithFMI = readFrom(selectedLocator);
        } catch (IOException e) {
            LOG.info("Failed to read Data Set with iuid={} from {}@{}",
                    selectedLocator.iuid, selectedLocator.getFilePath(),
                    selectedLocator.getStorageSystem(), e);
            selectedLocator = selectedLocator.getFallbackLocator();
            if (selectedLocator == null)
                throw e;
            LOG.info("Try to read Data Set from alternative location");
        }
    } while (originalDatasetWithFMI == null);

    Attributes dataset = originalDatasetWithFMI.getDataset();

    // Coerce the file dataset for the remote AE, merge it with the attributes
    // blob attached to the locator, then coerce the merged result.
    if (context.getRemoteAE() != null) {
        storescuService.coerceFileBeforeMerge(selectedLocator, dataset, context);
    }
    dataset = Utils.mergeAndNormalize(dataset, (Attributes) selectedLocator.getObject());
    if (context.getRemoteAE() != null) {
        storescuService.coerceAttributes(dataset, context);
    }

    datasetWithFMI = new DatasetWithFMI(originalDatasetWithFMI.getFileMetaInformation(), dataset);
    return this;
}
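import java.io.IOException;

/*
 * Minimal sketch of the fallback-read loop used in read() above. The Locator
 * and Reader types here are hypothetical stand-ins, not the dcm4chee API;
 * only the control flow (try, fall back, rethrow when the chain is exhausted)
 * mirrors the original.
 */
final class FallbackReadSketch {

    interface Locator {
        Locator getFallbackLocator();
    }

    interface Reader<T> {
        T read(Locator locator) throws IOException;
    }

    static <T> T readWithFallback(Locator locator, Reader<T> reader) throws IOException {
        T result = null;
        do {
            try {
                result = reader.read(locator);          // attempt the current location
            } catch (IOException e) {
                Locator fallback = locator.getFallbackLocator();
                if (fallback == null)
                    throw e;                            // chain exhausted: propagate
                locator = fallback;                     // retry from the alternative
            }
        } while (result == null);
        return result;
    }
}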
private List<ArchiveInstanceLocator> filterLocalOrExternalMatches(
        List<ArchiveInstanceLocator> matches, boolean localMatches) {
    ArrayList<ArchiveInstanceLocator> filteredMatches = new ArrayList<>();
    for (ArchiveInstanceLocator match : matches) {
        if (localMatches) {
            if (match.getStorageSystem() != null)
                filteredMatches.add(match);
        } else {
            if (match.getStorageSystem() == null)
                filteredMatches.add(match);
        }
    }
    return filteredMatches;
}
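/*
 * Functionally equivalent stream-based sketch of the filter above. It assumes
 * the same class context (ArchiveInstanceLocator already imported) plus
 * java.util.List and java.util.stream.Collectors on the classpath imports.
 * "Local" means the locator carries a storage system; "external" means it
 * does not.
 */
private List<ArchiveInstanceLocator> filterLocalOrExternalMatchesStream(
        List<ArchiveInstanceLocator> matches, boolean localMatches) {
    return matches.stream()
            .filter(match -> (match.getStorageSystem() != null) == localMatches)
            .collect(Collectors.toList());
}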
@Override
protected DataWriter createDataWriter(ArchiveInstanceLocator inst, String tsuid)
        throws IOException, UnsupportedStoreSCUException {
    // The parameter is already typed as ArchiveInstanceLocator, so only the
    // null case needs rejecting here.
    if (inst == null)
        throw new UnsupportedStoreSCUException("Unable to send instance");

    ArchiveAEExtension arcAEExt = context.getLocalAE().getAEExtension(ArchiveAEExtension.class);

    // Read the dataset, falling back to alternative locations on I/O failure.
    Attributes attrs = null;
    do {
        try {
            attrs = readFrom(inst);
        } catch (IOException e) {
            LOG.info("Failed to read Data Set with iuid={} from {}@{}",
                    inst.iuid, inst.getFilePath(), inst.getStorageSystem(), e);
            inst = inst.getFallbackLocator();
            if (inst == null) {
                throw e;
            }
            LOG.info("Try to read Data Set from alternative location");
        }
    } while (attrs == null);

    // Apply suppression criteria configured for the remote AE title, if any.
    if (context.getRemoteAE() != null) {
        String templateURI = arcAEExt.getRetrieveSuppressionCriteria()
                .getSuppressionCriteriaMap()
                .get(context.getRemoteAE().getAETitle());
        if (templateURI != null)
            inst = service.applySuppressionCriteria(inst, attrs, templateURI, context);
    }

    // Coerce the file attributes, merge them with the attributes blob attached
    // to the locator, then coerce the merged dataset for the remote AE.
    service.coerceFileBeforeMerge(inst, attrs, context);
    attrs = Utils.mergeAndNormalize(attrs, (Attributes) inst.getObject());
    service.coerceAttributes(attrs, context);

    // Decompress if the stored transfer syntax differs from the negotiated one.
    if (!tsuid.equals(inst.tsuid))
        Decompressor.decompress(attrs, inst.tsuid);

    return new DataWriterAdapter(attrs);
}
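import java.util.function.BinaryOperator;
import java.util.function.UnaryOperator;

/*
 * Hedged sketch of the coercion order in createDataWriter() above, with a
 * generic dataset type D standing in for org.dcm4che3.data.Attributes. The
 * three steps mirror service.coerceFileBeforeMerge(...),
 * Utils.mergeAndNormalize(file, blob) and service.coerceAttributes(...); the
 * functional interfaces are illustrative only, not the real service API.
 */
final class CoercionPipelineSketch<D> {

    private final UnaryOperator<D> coerceBeforeMerge;
    private final BinaryOperator<D> mergeAndNormalize;
    private final UnaryOperator<D> coerceForRemoteAE;

    CoercionPipelineSketch(UnaryOperator<D> coerceBeforeMerge,
                           BinaryOperator<D> mergeAndNormalize,
                           UnaryOperator<D> coerceForRemoteAE) {
        this.coerceBeforeMerge = coerceBeforeMerge;
        this.mergeAndNormalize = mergeAndNormalize;
        this.coerceForRemoteAE = coerceForRemoteAE;
    }

    // Applies the steps in the same order as createDataWriter(): coerce the
    // file dataset, merge it with the blob dataset, coerce the result.
    D apply(D fileDataset, D blobDataset) {
        D coerced = coerceBeforeMerge.apply(fileDataset);
        D merged = mergeAndNormalize.apply(coerced, blobDataset);
        return coerceForRemoteAE.apply(merged);
    }
}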
private ArchiveInstanceLocator createArchiveInstanceLocator(StoreContext context)
        throws InterruptedException, ExecutionException {
    // Waits for the bulkdata store to finish before resolving the file path.
    StorageContext bulkdataContext = context.getBulkdataContext().get();

    ArchiveInstanceLocator newLocator = new ArchiveInstanceLocator.Builder(
            context.getInstance().getSopClassUID(),
            context.getInstance().getSopInstanceUID(),
            context.getFileRef().getTransferSyntaxUID())
            .storageSystem(context.getStoreSession().getStorageSystem())
            .storagePath(bulkdataContext.getFilePath().toString())
            .entryName(context.getFileRef().getEntryName())
            .fileTimeZoneID(context.getFileRef().getTimeZone())
            .retrieveAETs(context.getInstance().getRawRetrieveAETs())
            .withoutBulkdata(context.getFileRef().isWithoutBulkData())
            .seriesInstanceUID(context.getAttributes().getString(Tag.SeriesInstanceUID))
            .studyInstanceUID(context.getAttributes().getString(Tag.StudyInstanceUID))
            .build();

    // Attach the decoded database attributes so retrieve-side code can merge
    // them with the dataset read from file (see Utils.mergeAndNormalize above).
    byte[] encodedInstanceAttrs = context.getInstance().getAttributesBlob().getEncodedAttributes();
    Attributes instanceAttrs = Utils.decodeAttributes(encodedInstanceAttrs);
    newLocator.setObject(instanceAttrs);

    return newLocator;
}
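/*
 * Hedged usage sketch, not part of the archive API: it only illustrates how
 * the Attributes attached above via setObject() are recovered on the retrieve
 * side with a cast before merging (see read() and createDataWriter() above).
 * AttributesCarrier is a hypothetical stand-in for ArchiveInstanceLocator.
 */
final class AttachedAttributesSketch {

    interface AttributesCarrier {
        void setObject(Object o);
        Object getObject();
    }

    static <T> T attachAndRecover(AttributesCarrier carrier, T decodedAttrs, Class<T> type) {
        carrier.setObject(decodedAttrs);        // done once when the locator is built
        return type.cast(carrier.getObject());  // done again when the dataset is read back
    }
}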