/**
 * Appends one tab-separated results row: image metadata (when an image is open),
 * the analysis threshold/scaling parameters, and the boolean analysis options.
 *
 * <p>Metadata values are read from the instance field {@code imageInfo} via
 * {@code Distribution_Analysis.getInfoProperty}; a missing property is appended as
 * the literal string {@code "null"} (same as the original concatenation behavior).
 *
 * @param results the row built so far; this method appends to it
 * @param imageAndAnalysisDetails analysis settings whose values are written out
 * @param imp the analysed image, or {@code null} to skip the metadata columns
 * @return {@code results} with the new tab-terminated columns appended
 */
public String printResults( String results, ImageAndAnalysisDetails imageAndAnalysisDetails, ImagePlus imp) {
  String[] propertyNames = { "File Name", "Patient's Name", "Patient ID", "Patient's Birth Date", "Acquisition Date", "Pixel Spacing", "ObjLen" };
  String[] parameters = {
    Double.toString(imageAndAnalysisDetails.airThreshold),
    Double.toString(imageAndAnalysisDetails.fatThreshold),
    Double.toString(imageAndAnalysisDetails.muscleThreshold),
    Double.toString(imageAndAnalysisDetails.marrowThreshold),
    Double.toString(imageAndAnalysisDetails.softThreshold),
    Double.toString(imageAndAnalysisDetails.rotationThreshold),
    Double.toString(imageAndAnalysisDetails.areaThreshold),
    Double.toString(imageAndAnalysisDetails.BMDthreshold),
    Double.toString(imageAndAnalysisDetails.scalingFactor),
    Double.toString(imageAndAnalysisDetails.constant)
  };
  // Build with StringBuilder instead of repeated String += (avoids O(n^2) copying).
  StringBuilder sb = new StringBuilder(results);
  if (imp != null) {
    // Cache the lookup: the original called getInfoProperty("File Name") twice.
    String fileName = Distribution_Analysis.getInfoProperty(imageInfo, "File Name");
    if (fileName != null) {
      sb.append(fileName).append("\t");
    } else if (imp.getImageStackSize() == 1) {
      // Single slice with no file name: fall back to the window title.
      sb.append(Distribution_Analysis.getInfoProperty(imageInfo, "Title")).append("\t");
    } else {
      // Stack with no file name: use the first line of the info string.
      sb.append(imageInfo.substring(0, imageInfo.indexOf("\n"))).append("\t");
    }
    // Remaining DICOM-style properties (index 0, "File Name", was handled above).
    for (int i = 1; i < propertyNames.length; ++i) {
      sb.append(Distribution_Analysis.getInfoProperty(imageInfo, propertyNames[i])).append("\t");
    }
  }
  for (String parameter : parameters) {
    sb.append(parameter).append("\t");
  }
  // Boolean options, in the same column order as the original output.
  sb.append(Boolean.toString(imageAndAnalysisDetails.manualRotation)).append("\t");
  sb.append(Boolean.toString(imageAndAnalysisDetails.flipDistribution)).append("\t");
  sb.append(Boolean.toString(imageAndAnalysisDetails.guessFlip)).append("\t");
  sb.append(Boolean.toString(imageAndAnalysisDetails.guessLarger)).append("\t");
  sb.append(Boolean.toString(imageAndAnalysisDetails.stacked)).append("\t");
  sb.append(Boolean.toString(imageAndAnalysisDetails.invertGuess)).append("\t");
  sb.append(Boolean.toString(imageAndAnalysisDetails.allowCleaving)).append("\t");
  sb.append(Boolean.toString(imageAndAnalysisDetails.preventPeeling)).append("\t");
  sb.append(imageAndAnalysisDetails.roiChoice).append("\t");
  sb.append(imageAndAnalysisDetails.rotationChoice).append("\t");
  sb.append(Boolean.toString(imageAndAnalysisDetails.flipHorizontal)).append("\t");
  sb.append(Boolean.toString(imageAndAnalysisDetails.flipVertical)).append("\t");
  return sb.toString();
}
/**
 * Extracts the surface voxels of a binary image: foreground voxels that have at
 * least one zero voxel among their 26-connected neighbours keep their value; all
 * interior foreground voxels are set to 0.
 *
 * @param imp binary input image (byte stack; 0 = background)
 * @return new image "Surface" containing only the surface voxels, with the input's
 *     calibration copied over; the input image is not modified
 */
private ImagePlus findSurfaceVoxels(final ImagePlus imp) {
  final int w = imp.getWidth();
  final int h = imp.getHeight();
  final int d = imp.getImageStackSize();
  final ImageStack stack = imp.getImageStack();
  final ImageStack surfaceStack = new ImageStack(w, h, d);
  for (int z = 0; z < d; z++) {
    IJ.showStatus("Finding surface voxels");
    // Clone each slice so writes to surfaceStack never touch the input stack,
    // which is still being read for neighbour checks below.
    final byte[] pixels = (byte[]) stack.getPixels(z + 1);
    surfaceStack.setPixels(pixels.clone(), z + 1);
    final ImageProcessor surfaceIP = surfaceStack.getProcessor(z + 1);
    for (int y = 0; y < h; y++) {
      checkNeighbours:
      for (int x = 0; x < w; x++) {
        // Background voxels stay as they are (already 0 in the clone).
        if (getPixel(stack, x, y, z, w, h, d) == (byte) 0) continue;
        // Scan the full 3x3x3 neighbourhood (includes the voxel itself, which is
        // known non-zero, so it never triggers the continue).
        // NOTE(review): out-of-range coordinates are passed to getPixel; presumably
        // it returns 0 outside the volume, making image-border voxels surface — confirm.
        for (int nz = -1; nz < 2; nz++) {
          final int znz = z + nz;
          for (int ny = -1; ny < 2; ny++) {
            final int yny = y + ny;
            for (int nx = -1; nx < 2; nx++) {
              final int xnx = x + nx;
              final byte pixel = getPixel(stack, xnx, yny, znz, w, h, d);
              // A zero neighbour means this is a surface voxel: keep its cloned
              // value and move on to the next x.
              if (pixel == (byte) 0) continue checkNeighbours;
            }
          }
        }
        // we checked all the neighbours for a 0
        // but didn't find one, so this is not a surface voxel;
        // temporarily mark it with 1 (a value distinct from background 0).
        surfaceIP.set(x, y, (byte) 1);
      }
    }
  }
  // turn all the 1's into 0's — erases the interior voxels marked above.
  final int wh = w * h;
  for (int z = 0; z < d; z++) {
    IJ.showStatus("Finding surface voxels");
    final ImageProcessor ip = surfaceStack.getProcessor(z + 1);
    for (int i = 0; i < wh; i++) {
      if (ip.get(i) == (byte) 1) ip.set(i, (byte) 0);
    }
  }
  final ImagePlus surfaceImp = new ImagePlus("Surface");
  surfaceImp.setStack(surfaceStack);
  surfaceImp.setCalibration(imp.getCalibration());
  return surfaceImp;
}
/**
 * Reduce error in thickness quantitation by trimming the one pixel overhang in the
 * thickness map: every map pixel whose corresponding input pixel is not part of the
 * phase being measured is zeroed.
 *
 * @param imp Binary input image (byte stack; foreground = 255)
 * @param impLTC Thickness map (float stack); modified in place
 * @param inv true if calculating thickness of background, false for foreground
 * @return Thickness map with pixels masked by input image (same object as impLTC)
 */
private ImagePlus trimOverhang(ImagePlus imp, ImagePlus impLTC, boolean inv) {
  final int w = imp.getWidth();
  final int h = imp.getHeight();
  final int d = imp.getImageStackSize();
  final ImageStack stack = imp.getImageStack();
  final ImageStack mapStack = impLTC.getImageStack();
  // The phase of interest: background (0) when inverted, foreground (255) otherwise.
  final int keepValue = inv ? 0 : 255;
  for (int z = 1; z <= d; z++) {
    IJ.showStatus("Masking thickness map...");
    IJ.showProgress(z, d);
    // Fetch per-slice processors directly; the original pre-allocated throwaway
    // ByteProcessor/FloatProcessor instances that were immediately overwritten.
    final ImageProcessor ip = stack.getProcessor(z);
    final ImageProcessor map = mapStack.getProcessor(z);
    for (int y = 0; y < h; y++) {
      for (int x = 0; x < w; x++) {
        // Zero the map wherever the input is not the phase being measured.
        if (ip.get(x, y) != keepValue) map.set(x, y, 0);
      }
    }
  }
  return impLTC;
}
/**
 * Takes an ROI and subtracts its mean value from each frame of the image.
 *
 * <p>Bug fix: the original duplicated {@code imp_in} but then fetched
 * {@code imp_in.getStack()} and subtracted from its processors, mutating the
 * caller's image and returning a copy wrapping the input's own stack. The
 * subtraction now operates on the duplicate's stack, leaving the input untouched.
 *
 * @param imp_in source image; not modified
 * @param noise_roi region whose per-slice mean is taken as the dark-noise level
 * @return a new image with the per-slice ROI mean subtracted from every pixel
 */
public static ImagePlus DarkNoiseRemoval(ImagePlus imp_in, Roi noise_roi) {
  ImagePlus imp_out = imp_in.duplicate();
  int size = imp_out.getImageStackSize();
  ImageProcessor ip = imp_out.getProcessor();
  // Preserve any display threshold so statistics match the original image's setup.
  double minThreshold = ip.getMinThreshold();
  double maxThreshold = ip.getMaxThreshold();
  Calibration cal = imp_in.getCalibration();
  int measurements = Analyzer.getMeasurements();
  // Operate on the copy's stack — NOT imp_in's — so the input is not modified.
  ImageStack stack = imp_out.getStack();
  for (int i = 1; i <= size; i++) {
    ip = stack.getProcessor(i);
    if (minThreshold != ImageProcessor.NO_THRESHOLD)
      ip.setThreshold(minThreshold, maxThreshold, ImageProcessor.NO_LUT_UPDATE);
    // Measure the mean inside the noise ROI, then subtract it from the whole slice.
    ip.setRoi(noise_roi);
    ImageStatistics stats = ImageStatistics.getStatistics(ip, measurements, cal);
    ip.resetRoi();
    ip.subtract(stats.mean);
    stack.setProcessor(ip, i);
  }
  imp_out.setStack(stack);
  return imp_out;
}
/**
 * Calculate the Rand index stats between two 3D clusters, as described by William M. Rand
 * \cite{Rand71}, but pruning out the zero component of the ground truth, which leads to an
 * asymmetric index. The input images must be 16-bit. Note: this method is based on the N^2
 * normalization.
 *
 * <p>BibTeX:
 *
 * <pre>
 * &#64;article{Rand71,
 *   author  = {William M. Rand},
 *   title   = {Objective criteria for the evaluation of clustering methods},
 *   journal = {Journal of the American Statistical Association},
 *   year    = {1971},
 *   volume  = {66},
 *   number  = {336},
 *   pages   = {846--850},
 *   doi     = {10.2307/2284239}
 * }
 * </pre>
 *
 * @param segA ground truth, 3D segmented image (objects are labeled with different numbers)
 * @param segB prediction, 3D segmented image (objects are labeled with different numbers)
 * @return [ precision, recall, Rand index with n^2 normalization ], or null if either input
 *     is not 16-bit
 */
public static double[] adaptedRandIndexStats3DN2(ImagePlus segA, ImagePlus segB) {
  // Both inputs must be 16-bit label images.
  if (!(segA.getImageStack().getProcessor(1) instanceof ShortProcessor)
      || !(segB.getImageStack().getProcessor(1) instanceof ShortProcessor)) return null;
  int nSlices = segA.getImageStackSize();
  int nLabelsA = 0;
  int nLabelsB = 0;
  // Calculate largest IDs of both clusterings to size the contingency matrix.
  for (int slice = 1; slice <= nSlices; slice++) {
    ImageProcessor gt = segA.getImageStack().getProcessor(slice);
    gt.resetMinAndMax();
    if (nLabelsA < gt.getMax()) nLabelsA = (int) gt.getMax();
    ImageProcessor proposal = segB.getImageStack().getProcessor(slice);
    proposal.resetMinAndMax();
    if (nLabelsB < proposal.getMax()) nLabelsB = (int) proposal.getMax();
  }
  // Form the contingency matrix: pij[a][b] = #voxels labeled a in segA and b in segB.
  long[][] pij = new long[nLabelsA + 1][nLabelsB + 1];
  // Total voxel count. Cast before multiplying: the original computed this product in
  // int arithmetic, which overflows for volumes of 2^31 voxels or more.
  double n = (double) segA.getImageStackSize() * segA.getWidth() * segA.getHeight();
  for (int slice = 1; slice <= nSlices; slice++) {
    ShortProcessor cluster1 = (ShortProcessor) segA.getImageStack().getProcessor(slice);
    ShortProcessor cluster2 = (ShortProcessor) segB.getImageStack().getProcessor(slice);
    final short[] pixels1 = (short[]) cluster1.getPixels();
    final short[] pixels2 = (short[]) cluster2.getPixels();
    double nPixels = pixels1.length;
    // Labels are unsigned 16-bit values; mask to avoid negative indices.
    for (int i = 0; i < nPixels; i++) pij[pixels1[i] & 0xffff][pixels2[i] & 0xffff]++;
  }
  // Sums of rows (skip background objects in the first cluster).
  double[] ai = new double[pij.length];
  for (int i = 1; i < pij.length; i++)
    for (int j = 0; j < pij[0].length; j++) {
      ai[i] += pij[i][j];
    }
  // Sums of columns (prune out the zero component in the labeling — un-assigned
  // "out" space).
  double[] bj = new double[pij[0].length];
  for (int j = 1; j < pij[0].length; j++)
    for (int i = 1; i < pij.length; i++) {
      bj[j] += pij[i][j];
    }
  // Voxels that are foreground in the ground truth but background in the proposal.
  double[] pi0 = new double[pij.length];
  double aux = 0;
  for (int i = 1; i < pij.length; i++) {
    pi0[i] = pij[i][0];
    aux += pi0[i];
  }
  double sumA = 0;
  for (int i = 0; i < ai.length; i++) sumA += ai[i] * ai[i];
  double sumB = 0;
  for (int j = 0; j < bj.length; j++) sumB += bj[j] * bj[j];
  // aux/n correction terms for the pruned background column, per the asymmetric
  // (N^2-normalized) formulation this implementation follows.
  sumB += aux / n;
  double sumAB = 0;
  for (int i = 1; i < pij.length; i++)
    for (int j = 1; j < pij[0].length; j++) sumAB += pij[i][j] * pij[i][j];
  sumAB += aux / n;
  // Return precision, recall and Rand index value.
  return new double[] {sumAB / sumB, sumAB / sumA, 1.0 - (sumA + sumB - 2.0 * sumAB) / (n * n)};
}
/**
 * Calculate the Rand index between two 3D clusters, as described by William M. Rand \cite{Rand71},
 * but pruning out the zero component of the ground truth, which leads to an asymmetric index. The
 * input images must be 16-bit. Note: this method is based on the N_choose_2 normalization.
 *
 * <p>BibTeX:
 *
 * <pre>
 * &#64;article{Rand71,
 *   author  = {William M. Rand},
 *   title   = {Objective criteria for the evaluation of clustering methods},
 *   journal = {Journal of the American Statistical Association},
 *   year    = {1971},
 *   volume  = {66},
 *   number  = {336},
 *   pages   = {846--850},
 *   doi     = {10.2307/2284239}
 * }
 * </pre>
 *
 * @param originalLabels ground truth, 3D segmented image (objects are labeled with different
 *     numbers)
 * @param proposedLabels prediction, 3D segmented image (objects are labeled with different
 *     numbers)
 * @return adapted Rand index value and prediction statistics, or null if either input is not
 *     16-bit
 */
public static ClassificationStatistics adaptedRandIndexStats3D(
    ImagePlus originalLabels, ImagePlus proposedLabels) {
  // Both inputs must be 16-bit label images.
  if (originalLabels.getImageStack().getProcessor(1) instanceof ShortProcessor == false
      || proposedLabels.getImageStack().getProcessor(1) instanceof ShortProcessor == false)
    return null;
  int nSlices = originalLabels.getImageStackSize();
  int maxIDGroundTruth = 0;
  int maxIDProposal = 0;
  // Calculate largest IDs of both clusterings to size the contingency matrix.
  for (int slice = 1; slice <= nSlices; slice++) {
    ImageProcessor gt = originalLabels.getImageStack().getProcessor(slice);
    gt.resetMinAndMax();
    if (maxIDGroundTruth < gt.getMax()) maxIDGroundTruth = (int) gt.getMax();
    ImageProcessor proposal = proposedLabels.getImageStack().getProcessor(slice);
    proposal.resetMinAndMax();
    if (maxIDProposal < proposal.getMax()) maxIDProposal = (int) proposal.getMax();
  }
  double agreements = 0;
  // Form the contingency matrix: cont[a][b] = #voxels labeled a in the ground
  // truth and b in the proposal.
  long[][] cont = new long[maxIDGroundTruth + 1][maxIDProposal + 1];
  double[] ni = new double[cont.length];
  double[] nj = new double[cont[0].length];
  // n = number of pixels that are "in" (not background) in cluster number 1
  // (the ground truth).
  double n = 0;
  for (int slice = 1; slice <= nSlices; slice++) {
    ShortProcessor cluster1 = (ShortProcessor) originalLabels.getImageStack().getProcessor(slice);
    ShortProcessor cluster2 = (ShortProcessor) proposedLabels.getImageStack().getProcessor(slice);
    final short[] pixels1 = (short[]) cluster1.getPixels();
    final short[] pixels2 = (short[]) cluster2.getPixels();
    double nPixels = pixels1.length;
    // Labels are unsigned 16-bit values; mask to avoid negative indices.
    for (int i = 0; i < nPixels; i++) {
      cont[pixels1[i] & 0xffff][pixels2[i] & 0xffff]++;
      if (pixels1[i] > 0) n++;
    }
  }
  // Sums of rows (skip background objects in the first cluster).
  for (int i = 1; i < cont.length; i++)
    for (int j = 0; j < cont[0].length; j++) {
      ni[i] += cont[i][j];
    }
  // Sums of columns (prune out the zero component in the labeling — un-assigned
  // "out" space).
  for (int j = 1; j < cont[0].length; j++)
    for (int i = 1; i < cont.length; i++) {
      nj[j] += cont[i][j];
    }
  // True positives - type (i): objects in the pair are placed in the same class
  // in cluster1 and in the same class in cluster2
  // (prune out the zero component in the labeling (un-assigned "out" space)).
  // All pair counts below use k*(k-1), i.e. twice the number of pairs; the
  // deferred "/ 2.0" happens at the end.
  double truePositives = 0;
  for (int j = 1; j < cont[0].length; j++)
    for (int i = 1; i < cont.length; i++)
      truePositives += cont[i][j] * (cont[i][j] - 1.0); // pairs x2; halved below
  // Total number of pairs (after pruning background pixels of the ground truth).
  double nPairsTotal = n * (n - 1.0); // pairs x2; ratio below is unaffected
  // Total number of positive samples in ground truth.
  double nPosTrue = 0;
  for (int k = 0; k < ni.length; k++) nPosTrue += ni[k] * (ni[k] - 1.0); // pairs x2
  // Number of pairs actually classified as positive (in the prediction).
  double nPosActual = 0;
  for (int k = 0; k < nj.length; k++) nPosActual += nj[k] * (nj[k] - 1.0); // pairs x2
  // True negatives - type (ii): objects in the pair are placed in different
  // classes in cluster1 and in different classes in cluster2.
  // trueNegatives = nNegTrue - falsePositives = (nPairsTotal - nPosTrue) - (nPosActual -
  // truePositives)
  double trueNegatives = nPairsTotal + truePositives - nPosTrue - nPosActual;
  agreements += truePositives + trueNegatives; // number of agreements (x2)
  double falsePositives = nPosActual - truePositives;
  double nNegActual = nPairsTotal - nPosActual;
  double falseNegatives = nNegActual - trueNegatives;
  // Convert the doubled pair counts to true pair counts; the randIndex ratio
  // uses the doubled agreements and nPairsTotal, where the factor cancels.
  truePositives /= 2.0;
  trueNegatives /= 2.0;
  falsePositives /= 2.0;
  falseNegatives /= 2.0;
  double randIndex = agreements / nPairsTotal;
  return new ClassificationStatistics(
      truePositives, trueNegatives, falsePositives, falseNegatives, randIndex);
}