/**
 * Builds the HAS OBS CONTEXT content items describing the observer:
 * Observer Type (= Person) and Person Observer Name.
 *
 * @param personName the name of the person observer.
 * @return a Collection containing the two content item Datasets.
 */
private Collection getObserverContextItems(String personName) {
    // Observer Type (DCM 121005) = Person (DCM 121006)
    Dataset ds = dof.newDataset();
    ds.putCS(Tags.RelationshipType, "HAS OBS CONTEXT");
    ds.putCS(Tags.ValueType, "CODE");
    DcmElement cnSq = ds.putSQ(Tags.ConceptNameCodeSeq);
    Dataset cnDS = cnSq.addNewItem();
    cnDS.putSH(Tags.CodeValue, "121005");
    cnDS.putSH(Tags.CodingSchemeDesignator, "DCM");
    cnDS.putLO(Tags.CodeMeaning, "Observer Type");
    DcmElement ccSq = ds.putSQ(Tags.ConceptCodeSeq);
    Dataset ccDS = ccSq.addNewItem();
    ccDS.putSH(Tags.CodeValue, "121006");
    ccDS.putSH(Tags.CodingSchemeDesignator, "DCM");
    ccDS.putLO(Tags.CodeMeaning, "Person");

    // Person Observer Name (DCM 121008)
    Dataset ds1 = dof.newDataset();
    ds1.putCS(Tags.RelationshipType, "HAS OBS CONTEXT");
    ds1.putCS(Tags.ValueType, "PNAME");
    DcmElement cnSq1 = ds1.putSQ(Tags.ConceptNameCodeSeq);
    Dataset cnDS1 = cnSq1.addNewItem();
    cnDS1.putSH(Tags.CodeValue, "121008");
    cnDS1.putSH(Tags.CodingSchemeDesignator, "DCM");
    cnDS1.putLO(Tags.CodeMeaning, "Person Observer Name");
    ds1.putPN(Tags.PersonName, personName);

    ArrayList col = new ArrayList();
    col.add(ds);
    col.add(ds1);
    return col;
}
/**
 * Prepares the Dataset representing the search keys in C-FIND. As no values are set, the keys
 * match every content in the archive. The user has to specify concrete values to limit the
 * search. See PS 3.4 - C.6.2.1.2 Study level.
 *
 * <p>As the result of the C-FIND, these keys are filled with the values found in the archive.
 *
 * @param cfg the configuration properties for this class.
 * @throws ParseException if a given property for the keys was not found.
 */
private void initKeys(ConfigProperties cfg) throws ParseException {
    // Remove all keys: start with an empty Dataset
    keys = dof.newDataset();

    // Query/Retrieve Level. PS 3.4 - C.6.2 Study Root SOP Class Group
    keys.putCS(Tags.QueryRetrieveLevel, getQueryRetrieveLevel(STUDY_LEVEL));

    // UNIQUE STUDY LEVEL KEY. See PS 3.4 - C.6.2.1.2 Study level
    keys.putUI(Tags.StudyInstanceUID);

    // REQUIRED STUDY LEVEL KEYS. See PS 3.4 - C.6.2.1.2 Study level
    keys.putDA(Tags.StudyDate);
    // Not defined: StudyTime
    // Not defined: AccessionNumber
    keys.putPN(Tags.PatientName);
    keys.putLO(Tags.PatientID);
    // Not defined: StudyID

    // OPTIONAL STUDY LEVEL KEYS. See PS 3.4 - C.6.2.1.2 Study level
    keys.putUS(Tags.NumberOfStudyRelatedSeries);
    keys.putUS(Tags.NumberOfStudyRelatedInstances);
    // many more are defined...

    // Add the keys found in the configuration properties
    addQueryKeys(cfg);
}
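// A minimal caller-side sketch (not part of the original class): as the Javadoc above notes,
// universal-match keys return every study, so a caller narrows the query by putting concrete
// values, wildcards, or ranges into the same attributes before issuing the C-FIND. The method
// name and the example values below are assumptions; "keys", "dof" and the Tags constants are
// the members used above.
private void narrowKeysExample() {
    keys.putPN(Tags.PatientName, "DOE^JOHN*");        // wild card matching, PS 3.4 - C.2.2.2.4
    keys.putDA(Tags.StudyDate, "20240101-20240131");  // range matching, PS 3.4 - C.2.2.2.5
}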
public void handleNotification(Notification notif, Object handback) {
    String spsuid = (String) notif.getUserData();
    Dataset pps = DcmObjectFactory.getInstance().newDataset();
    try {
        GPWLManager gpwlmgr = getGPWLManager();
        Dataset sps = gpwlmgr.getWorklistItem(spsuid);
        String ppsiuid = spsuid + ppsuidSuffix;
        String status = sps.getString(Tags.GPSPSStatus);
        pps.putCS(Tags.GPPPSStatus, status);
        pps.putUI(Tags.SOPInstanceUID, ppsiuid);
        Date now = new Date();
        if ("IN PROGRESS".equals(status)) {
            try {
                getGPPPSManager().getGPPPS(ppsiuid);
                return; // GP-PPS already exists -> avoid duplicate N-CREATE
            } catch (Exception e) {
                // GP-PPS not found yet -> proceed with N-CREATE
            }
            pps.putSH(Tags.PPSID, "PPS" + ppsiuid.hashCode());
            pps.putDA(Tags.PPSStartDate, now);
            pps.putTM(Tags.PPSStartTime, now);
            pps.putDA(Tags.PPSEndDate);
            pps.putTM(Tags.PPSEndTime);
            for (int i = 0; i < N_CREATE_TYPE2_ATTRS.length; i++) {
                pps.putXX(N_CREATE_TYPE2_ATTRS[i]);
            }
            pps.putAll(sps.subSet(N_CREATE_SPS_ATTRS));
            copyCode(
                copyWorkitemCode,
                sps.getItem(Tags.ScheduledWorkitemCodeSeq),
                pps.putSQ(Tags.PerformedWorkitemCodeSeq));
            copyCode(
                copyStationNameCode,
                sps.getItem(Tags.ScheduledStationNameCodeSeq),
                pps.putSQ(Tags.PerformedStationNameCodeSeq));
            copyCode(
                copyStationClassCode,
                sps.getItem(Tags.ScheduledStationClassCodeSeq),
                pps.putSQ(Tags.PerformedStationClassCodeSeq));
            copyCode(
                copyStationGeographicLocationCode,
                sps.getItem(Tags.ScheduledStationGeographicLocationCodeSeq),
                pps.putSQ(Tags.PerformedStationGeographicLocationCodeSeq));
            copyCode(
                copyProcessingApplicationsCode,
                sps.getItem(Tags.ScheduledProcessingApplicationsCodeSeq),
                pps.putSQ(Tags.PerformedProcessingApplicationsCodeSeq));
        } else if ("COMPLETED".equals(status) || "DISCONTINUED".equals(status)) {
            pps.putDA(Tags.PPSEndDate, now);
            pps.putTM(Tags.PPSEndTime, now);
            pps.putAll(gpwlmgr.getOutputInformation(spsuid));
        } else {
            return;
        }
    } catch (Exception e) {
        log.error("Failed to access GP-SPS[" + spsuid + "]", e);
        return;
    }
    // Schedule one GP-PPS order per configured destination AE title
    for (int i = 0; i < destAETs.length; i++) {
        PPSOrder order = new PPSOrder(pps, destAETs[i]);
        try {
            log.info("Scheduling " + order);
            jmsDelegate.queue(queueName, order, Message.DEFAULT_PRIORITY, 0L);
        } catch (Exception e) {
            log.error("Failed to schedule " + order, e);
        }
    }
}
/**
 * Retrieves a study with C-GET and returns the Datasets of the pending responses.
 *
 * @param ds the Dataset holding the UID of the study to retrieve and the patient
 *     demographics used for logging.
 * @return a Vector of the Datasets received with the pending C-GET responses.
 */
public Vector cGET(Dataset ds) throws ConnectException, IOException, InterruptedException {
    PresContext pc;
    List dimseList;
    Vector datasetVector;

    // An association must be active
    if (aassoc == null) {
        throw new ConnectException("No Association established");
    }

    // Test if a Presentation Context for C-GET is supported.
    // API doc: Association.getAcceptedPresContext(String asuid, String tsuid)
    if ((pc = aassoc
                .getAssociation()
                .getAcceptedPresContext(
                    UIDs.StudyRootQueryRetrieveInformationModelGET,
                    UIDs.ExplicitVRLittleEndian)) == null
        && (pc = aassoc
                .getAssociation()
                .getAcceptedPresContext(
                    UIDs.StudyRootQueryRetrieveInformationModelGET,
                    UIDs.ImplicitVRLittleEndian)) == null) {
        throw new ConnectException(
            "Association does not support presentation context for StudyRootQueryRetrieveInformationModelGET SOP.");
    }

    // Get the UID of the study to retrieve
    String suid = ds.getString(Tags.SOPInstanceUID);

    // Prepare info for logging
    String patName = ds.getString(Tags.PatientName);
    String patID = ds.getString(Tags.PatientID);
    String studyDate = ds.getString(Tags.StudyDate);
    String prompt =
        "Study[" + suid + "] from " + studyDate + " for Patient[" + patID + "]: " + patName;
    // log.info("Retrieving: " + prompt);

    // New Command Set, see: DICOM Part 7: Message Exchange, 6.3.1 Command Set Structure
    Command rqCmd = dof.newCommand();
    // API doc: Command.initCGetRQ(int msgID, String sopClassUID, int priority)
    rqCmd.initCGetRQ(assoc.nextMsgID(), UIDs.StudyRootQueryRetrieveInformationModelGET, priority);
    Dataset rqDs = dof.newDataset();
    rqDs.putCS(Tags.QueryRetrieveLevel, getQueryRetrieveLevel(STUDY_LEVEL));
    // Only the Unique Key is allowed in C-GET. PS 3.4 - C.2.2.1 Attribute Types
    rqDs.putUI(Tags.SOPInstanceUID, suid);

    // API doc: AssociationFactory.newDimse(int pcid, Command cmd, Dataset ds)
    // DIMSE (DICOM Message Service Element) is the message service used in DICOM.
    Dimse getRq = aFact.newDimse(pc.pcid(), rqCmd, rqDs);

    // Invoke the active association with the C-GET request Dimse
    FutureRSP future = aassoc.invoke(getRq);

    // Response to the C-GET request.
    // The result cannot be accessed until it has been set.
    Dimse getRsp = future.get();
    Command rspCmd = getRsp.getCommand();

    if (DEBUG) {
        StringWriter w = new StringWriter();
        w.write("C-GET RQ Identifier:\n");
        rqDs.dumpDataset(w, null);
        log.debug(w.toString());
    }

    // Get the list of pending responses
    dimseList = future.listPending();

    // >>>> Extract the Datasets from the Dimses
    datasetVector = new Vector();

    // If no list of DIMSE objects was generated or it is empty, return an empty Vector
    if (dimseList == null || dimseList.isEmpty()) {
        return datasetVector;
    }

    // Process all elements
    for (int i = 0; i < dimseList.size(); i++) {
        Dataset rspDs = ((Dimse) dimseList.get(i)).getDataset();
        datasetVector.addElement(rspDs);
        if (rspDs == null) {
            log.warn("Received a pending C-GET response without a Dataset");
        }
    }

    // PS 3.7 - 9.3.3 C-GET PROTOCOL, 9.3.3.2 C-GET-RSP
    int status = rspCmd.getStatus();
    switch (status) {
        case 0x0000:
            // log.info("Retrieved: " + prompt);
            break;
        case 0xB000:
            log.error("One or more failures during retrieve of " + prompt);
            break;
        default:
            log.error(
                "Failed to retrieve " + prompt + "\n\terror status: " + Integer.toHexString(status));
            break;
    }
    log.debug("Number of received Datasets: " + datasetVector.size());
    return datasetVector;
}
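// A hypothetical caller sketch (not part of the original class): builds the identifying
// Dataset expected by cGET() above, runs the retrieval, and logs the received objects.
// The method name, UID, and demographics are placeholder values; "dof", "log", the Tags
// constants and cGET() are the members used above.
private void retrieveStudyExample() throws Exception {
    Dataset id = dof.newDataset();
    id.putUI(Tags.SOPInstanceUID, "1.2.840.99999.1.2.3"); // UID of the study to retrieve
    id.putPN(Tags.PatientName, "DOE^JOHN");
    id.putLO(Tags.PatientID, "PAT-0001");
    id.putDA(Tags.StudyDate, "20240101");
    Vector received = cGET(id);
    for (int i = 0; i < received.size(); i++) {
        Dataset obj = (Dataset) received.elementAt(i);
        if (obj != null) {
            log.info("Received object: " + obj.getString(Tags.SOPInstanceUID));
        }
    }
}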
/**
 * Convert an image to RGB.
 *
 * @param inFile the file to convert.
 * @param outFile the output file, which may be the same as inFile.
 * @return the AnonymizerStatus result.
 */
public static AnonymizerStatus convert(File inFile, File outFile) {
    long fileLength = inFile.length();
    logger.debug("Entering DICOMPaletteImageConverter.convert");
    logger.debug("File length = " + fileLength);

    BufferedInputStream in = null;
    BufferedOutputStream out = null;
    File tempFile = null;
    byte[] buffer = new byte[4096];
    try {
        // Check that this is a known format.
        in = new BufferedInputStream(new FileInputStream(inFile));
        DcmParser parser = pFact.newDcmParser(in);
        FileFormat fileFormat = parser.detectFileFormat();
        if (fileFormat == null) {
            throw new IOException("Unrecognized file format: " + inFile);
        }

        // Get the dataset (excluding pixels) and leave the input stream open
        Dataset dataset = oFact.newDataset();
        parser.setDcmHandler(dataset.getDcmHandler());
        parser.parseDcmFile(fileFormat, Tags.PixelData);

        // Make sure this is an image
        if (parser.getReadTag() != Tags.PixelData) {
            close(in);
            return AnonymizerStatus.SKIP(inFile, "Not an image");
        }

        // Get the required parameters and make sure they are okay
        int numberOfFrames = getInt(dataset, Tags.NumberOfFrames, 1);
        int rows = getInt(dataset, Tags.Rows, 0);
        int columns = getInt(dataset, Tags.Columns, 0);
        String photometricInterpretation = getString(dataset, Tags.PhotometricInterpretation, "");
        if ((rows == 0) || (columns == 0)) {
            close(in);
            return AnonymizerStatus.SKIP(inFile, "Unable to get the rows and columns");
        }
        if (!photometricInterpretation.equals("PALETTE COLOR")) {
            close(in);
            return AnonymizerStatus.SKIP(
                inFile, "Unsupported PhotometricInterpretation: " + photometricInterpretation);
        }
        if (parser.getReadTag() != Tags.PixelData) {
            close(in);
            return AnonymizerStatus.SKIP(inFile, "No pixels");
        }

        // Get the encoding and set the parameters
        DcmDecodeParam fileParam = parser.getDcmDecodeParam();
        String fileEncodingUID = UIDs.ImplicitVRLittleEndian;
        FileMetaInfo fmi = dataset.getFileMetaInfo();
        if (fmi != null) fileEncodingUID = fmi.getTransferSyntaxUID();
        boolean isBigEndian = fileEncodingUID.equals(UIDs.ExplicitVRBigEndian);
        String encodingUID = UIDs.ExplicitVRLittleEndian;
        DcmEncodeParam encoding = (DcmEncodeParam) DcmDecodeParam.valueOf(encodingUID);
        boolean swap = (fileParam.byteOrder != encoding.byteOrder);

        if (encoding.encapsulated) {
            logger.debug("Encapsulated pixel data found");
            close(in);
            return AnonymizerStatus.SKIP(inFile, "Encapsulated pixel data not supported");
        }

        // Get the LUTs
        LUT red =
            new LUT(
                dataset.getInts(Tags.RedPaletteColorLUTDescriptor),
                dataset.getInts(Tags.RedPaletteColorLUTData));
        LUT green =
            new LUT(
                dataset.getInts(Tags.GreenPaletteColorLUTDescriptor),
                dataset.getInts(Tags.GreenPaletteColorLUTData));
        LUT blue =
            new LUT(
                dataset.getInts(Tags.BluePaletteColorLUTDescriptor),
                dataset.getInts(Tags.BluePaletteColorLUTData));

        // Set the PlanarConfiguration to 0
        dataset.putUS(Tags.PlanarConfiguration, 0);

        // Set the PhotometricInterpretation to RGB
        dataset.putCS(Tags.PhotometricInterpretation, "RGB");

        // Set the pixel parameters
        dataset.putUS(Tags.SamplesPerPixel, 3);
        dataset.putUS(Tags.BitsAllocated, 8);
        dataset.putUS(Tags.BitsStored, 8);
        dataset.putUS(Tags.HighBit, 7);

        // Remove the lookup tables and their descriptors
        dataset.remove(Tags.RedPaletteColorLUTDescriptor);
        dataset.remove(Tags.GreenPaletteColorLUTDescriptor);
        dataset.remove(Tags.BluePaletteColorLUTDescriptor);
        dataset.remove(Tags.RedPaletteColorLUTData);
        dataset.remove(Tags.GreenPaletteColorLUTData);
        dataset.remove(Tags.BluePaletteColorLUTData);

        // Save the dataset to a temporary file, and rename at the end.
        File tempDir = outFile.getParentFile();
        tempFile = File.createTempFile("DCMtemp-", ".anon", tempDir);
        out = new BufferedOutputStream(new FileOutputStream(tempFile));

        // Create and write the metainfo for the encoding we are using
        fmi = oFact.newFileMetaInfo(dataset, encodingUID);
        dataset.setFileMetaInfo(fmi);
        fmi.write(out);

        // Write the dataset as far as was parsed
        dataset.writeDataset(out, encoding);

        // Process the pixels
        int nPixels = numberOfFrames * rows * columns;
        int nPixelBytes = nPixels * 3 /*samplesPerPixel*/;
        int pad = nPixelBytes & 1;
        dataset.writeHeader(out, encoding, parser.getReadTag(), VRs.OB, nPixelBytes + pad);
        int pd;
        int b1, b2;
        int bytesPerFrame = rows * columns * 2;
        byte[] frameBytes = new byte[bytesPerFrame];
        for (int frame = 0; frame < numberOfFrames; frame++) {
            if (in.read(frameBytes, 0, frameBytes.length) != bytesPerFrame)
                throw new Exception("End of File");
            for (int p = 0; p < bytesPerFrame; ) {
                b1 = frameBytes[p++];
                b2 = frameBytes[p++];
                if (!swap) {
                    pd = ((b2 & 0xff) << 8) | (b1 & 0xff);
                } else {
                    pd = ((b1 & 0xff) << 8) | (b2 & 0xff);
                }
                out.write(red.get(pd));
                out.write(green.get(pd));
                out.write(blue.get(pd));
            }
        }
        if (pad != 0) out.write(0);
        logger.debug("Finished writing the pixels");

        // Skip everything after the pixels
        out.flush();
        out.close();
        in.close();
        outFile.delete();
        tempFile.renameTo(outFile);
        return AnonymizerStatus.OK(outFile, "");
    } catch (Exception e) {
        logger.debug("Exception while processing image.", e);

        // Close the input stream if it actually got opened.
        close(in);

        // Close the output stream if it actually got opened,
        // and delete the tempFile in case it is still there.
        try {
            if (out != null) {
                out.close();
                tempFile.delete();
            }
        } catch (Exception ex) {
            logger.warn("Unable to close the output stream.");
        }

        // Quarantine the object
        return AnonymizerStatus.QUARANTINE(inFile, e.getMessage());
    }
}
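// A minimal sketch of the palette LUT helper assumed by convert() above; the actual LUT class
// is not shown in this excerpt, so the internals below are an illustration based on how it is
// constructed (descriptor + data arrays) and used (get(pd) returns an 8-bit output sample).
// The DICOM Palette Color LUT Descriptor holds [number of entries, first input value mapped,
// bits per entry]; out-of-range input clamps to the first or last entry.
static class LUT {
    private final int firstValue; // first stored pixel value mapped by the table
    private final int[] table;    // one output sample per entry, scaled to 8 bits

    LUT(int[] descriptor, int[] data) {
        int entries = (descriptor[0] == 0) ? 65536 : descriptor[0]; // 0 means 2^16 entries
        this.firstValue = descriptor[1];
        int bits = descriptor[2];              // usually 8 or 16 bits per entry
        int shift = Math.max(0, bits - 8);     // scale 16-bit entries down to 8 bits
        this.table = new int[entries];
        // Assumes one data word per LUT entry (the common case; packed 8-bit data not handled).
        for (int i = 0; i < entries; i++) {
            table[i] = (data[i] >> shift) & 0xff;
        }
    }

    // Map a stored pixel value to an 8-bit output sample, clamping out-of-range input.
    int get(int pixel) {
        int index = pixel - firstValue;
        if (index < 0) index = 0;
        if (index >= table.length) index = table.length - 1;
        return table[index];
    }
}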