Code example #1
 @Override
 protected IterableIntervalProjector2D<T, ARGBType> getProjector(
     final int dimX,
     final int dimY,
     final RandomAccessibleInterval<T> source,
     final RandomAccessibleInterval<ARGBType> target) {
   return new IterableIntervalProjector2D<T, ARGBType>(
       dimX, dimY, source, Views.iterable(target), m_converter);
 }
Code example #2
  public FirstIteration(
      final ImagePortion portion,
      final RandomAccessibleInterval<FloatType> psi,
      final ArrayList<RandomAccessibleInterval<FloatType>> imgs,
      final ArrayList<RandomAccessibleInterval<FloatType>> weights) {
    this.portion = portion;
    this.psi = psi;
    this.imgs = imgs;
    this.weights = weights;

    this.psiIterable = Views.iterable(psi);
    this.iterableImgs = new ArrayList<IterableInterval<FloatType>>();
    this.iterableWeights = new ArrayList<IterableInterval<FloatType>>();

    this.realSum = new RealSum();

    if (imgs.size() != weights.size())
      throw new RuntimeException("Number of weights and images must be equal.");

    compatibleIteration = true;

    for (final RandomAccessibleInterval<FloatType> img : imgs) {
      final IterableInterval<FloatType> imgIterable = Views.iterable(img);

      if (!psiIterable.iterationOrder().equals(imgIterable.iterationOrder()))
        compatibleIteration = false;

      this.iterableImgs.add(imgIterable);
    }

    for (final RandomAccessibleInterval<FloatType> weight : weights) {
      final IterableInterval<FloatType> weightIterable = Views.iterable(weight);

      if (!psiIterable.iterationOrder().equals(weightIterable.iterationOrder()))
        compatibleIteration = false;

      for (final IterableInterval<FloatType> imgIterable : iterableImgs)
        if (!imgIterable.iterationOrder().equals(weightIterable.iterationOrder()))
          compatibleIteration = false;

      this.iterableWeights.add(weightIterable);
    }
  }
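
The compatibleIteration flag computed above feeds the standard ImgLib2 idiom of choosing between a fast simultaneous-cursor loop and a position-based fallback. A minimal sketch of that idiom, assuming two FloatType images covering the same interval (the class, method, and variable names here are illustrative, not from the project above):

import net.imglib2.Cursor;
import net.imglib2.IterableInterval;
import net.imglib2.RandomAccess;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.type.numeric.real.FloatType;
import net.imglib2.view.Views;

public class AddInPlaceSketch {
  /** Adds source to target pixel-wise; assumes both cover the same interval. */
  public static void addInPlace(
      final RandomAccessibleInterval<FloatType> target,
      final RandomAccessibleInterval<FloatType> source) {
    final IterableInterval<FloatType> t = Views.iterable(target);
    final IterableInterval<FloatType> s = Views.iterable(source);

    if (t.iterationOrder().equals(s.iterationOrder())) {
      // compatible iteration order: two cursors visit pixels in the same sequence
      final Cursor<FloatType> tc = t.cursor();
      final Cursor<FloatType> sc = s.cursor();
      while (tc.hasNext()) tc.next().add(sc.next());
    } else {
      // incompatible iteration order: localize on the target, look up the source by position
      final Cursor<FloatType> tc = t.localizingCursor();
      final RandomAccess<FloatType> sr = source.randomAccess();
      while (tc.hasNext()) {
        tc.fwd();
        sr.setPosition(tc);
        tc.get().add(sr.get());
      }
    }
  }
}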
Code example #3
File: MapViewTest.java (project: djniles/imagej-ops)
  @Test
  public void testRandomAccessibleIntervalView() {
    @SuppressWarnings("unchecked")
    final RandomAccessibleInterval<ByteType> res =
        (RandomAccessibleInterval<ByteType>) ops.run(MapViewRAIToRAI.class, in, op, new ByteType());

    final Cursor<ByteType> iterable = Views.iterable(res).cursor();
    while (iterable.hasNext()) {
      assertEquals((byte) 10, iterable.next().get());
    }
  }
Code example #4
File: LocalizationUtils.java (project: jacobke/imglib)
 public static final <T extends RealType<T>> void addGaussianNoiseToImage(
     RandomAccessibleInterval<T> img, double sigma_noise) {
   IterableInterval<T> iterImg = Views.iterable(img);
   Cursor<T> lc = iterImg.localizingCursor();
   double val;
   T var = iterImg.firstElement().createVariable();
   while (lc.hasNext()) {
     lc.fwd();
     val = Math.max(0, sigma_noise * ran.nextGaussian());
     var.setReal(val);
     lc.get().add(var);
   }
 }
Code example #5
 public static <T extends Type<T> & Comparable<T>> int countLocalMaxima(
     final RandomAccessibleInterval<T> img, final Shape shape) {
   int nMaxima = 0;
   final RandomAccessibleInterval<T> source = Views.interval(img, Intervals.expand(img, -1));
   final Cursor<T> center = Views.iterable(source).cursor();
   A:
   for (final Neighborhood<T> neighborhood : shape.neighborhoods(source)) {
     final T c = center.next();
     for (final T t : neighborhood) if (t.compareTo(c) > 0) continue A;
     ++nMaxima;
   }
   return nMaxima;
 }
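
A hypothetical call site for the helper above, assuming countLocalMaxima() is in scope (e.g. via a static import); the test image, class name, and shape parameters are illustrative only:

import net.imglib2.algorithm.neighborhood.RectangleShape;
import net.imglib2.img.Img;
import net.imglib2.img.array.ArrayImgs;
import net.imglib2.type.numeric.integer.UnsignedByteType;

public class CountLocalMaximaExample {
  public static void main(final String[] args) {
    // an all-zero 64x64 test image; real code would load or compute something meaningful
    final Img<UnsignedByteType> img = ArrayImgs.unsignedBytes(64, 64);
    // span 1, skipCenter = true: a 3x3 neighborhood around each interior pixel, center excluded
    final int n = countLocalMaxima(img, new RectangleShape(1, true));
    // NB: plateau pixels also count, since only a strictly larger neighbor rejects a candidate
    System.out.println("local maxima: " + n);
  }
}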
Code example #6
  protected void generateHistogramData(DataContainer<T> container) {
    double ch1BinWidth = getXBinWidth(container);
    double ch2BinWidth = getYBinWidth(container);

    // get the 2 images for the calculation of Pearson's
    final RandomAccessibleInterval<T> img1 = getImageCh1(container);
    final RandomAccessibleInterval<T> img2 = getImageCh2(container);
    final RandomAccessibleInterval<BitType> mask = container.getMask();

    // get the cursors for iterating through pixels in images
    TwinCursor<T> cursor =
        new TwinCursor<T>(
            img1.randomAccess(), img2.randomAccess(), Views.iterable(mask).localizingCursor());

    // create new image to put the scatter-plot in
    final ImgFactory<LongType> scatterFactory = new ArrayImgFactory<LongType>();
    plotImage = scatterFactory.create(new int[] {xBins, yBins}, new LongType());

    // create access cursors
    final RandomAccess<LongType> histogram2DCursor = plotImage.randomAccess();

    // iterate over images
    long[] pos = new long[plotImage.numDimensions()];
    while (cursor.hasNext()) {
      cursor.fwd();
      double ch1 = cursor.getFirst().getRealDouble();
      double ch2 = cursor.getSecond().getRealDouble();
      /* Scale values for both channels to fit in the range.
       * Moreover mirror the y value on the x axis.
       */
      pos[0] = getXValue(ch1, ch1BinWidth, ch2, ch2BinWidth);
      pos[1] = getYValue(ch1, ch1BinWidth, ch2, ch2BinWidth);
      // set position of input/output cursor
      histogram2DCursor.setPosition(pos);
      // get current value at position and increment it
      long count = histogram2DCursor.get().getIntegerLong();
      count++;

      histogram2DCursor.get().set(count);
    }

    xBinWidth = ch1BinWidth;
    yBinWidth = ch2BinWidth;
    xLabel = getLabelCh1();
    yLabel = getLabelCh2();
    xMin = getXMin(container);
    xMax = getXMax(container);
    yMin = getYMin(container);
    yMax = getYMax(container);
  }
Code example #7
File: LocalizationUtils.java (project: jacobke/imglib)
 public static final <T extends RealType<T>> void addGaussianSpotToImage(
     RandomAccessibleInterval<T> img, double[] params) {
   IterableInterval<T> iterImg = Views.iterable(img);
   Cursor<T> lc = iterImg.localizingCursor();
   int nDims = img.numDimensions();
   double[] position = new double[nDims];
   double val;
   T var = iterImg.firstElement().createVariable();
   while (lc.hasNext()) {
     lc.fwd();
     lc.localize(position);
     val = gaussian.val(position, params);
     var.setReal(val);
     lc.get().add(var);
   }
 }
Code example #8
File: BrightnessContrast.java (project: joevegan/xray)
  private void computeDataMinMax(final RandomAccessibleInterval<? extends RealType<?>> img) {
    // FIXME: Reconcile this with DefaultDatasetView.autoscale(int). There is
    // no reason to hardcode the usage of ComputeMinMax twice. Rather, there
     // should be a single entry point for obtaining the channel min/maxes from
    // the metadata, and if they aren't there, then compute them. Probably
    // Dataset (not DatasetView) is a good place for it, because it is metadata
    // independent of the visualization settings.

    DataRange range = autoscaleService.getDefaultRandomAccessRange(img);
    dataMin = range.getMin();
    dataMax = range.getMax();
    // System.out.println("IN HERE!!!!!!");
    // System.out.println(" dataMin = " + dataMin);
    // System.out.println(" dataMax = " + dataMax);
    @SuppressWarnings({"unchecked", "rawtypes"})
    Iterable<T> iterable =
        Views.iterable((RandomAccessibleInterval<T>) (RandomAccessibleInterval) img);
    BinMapper1d<T> mapper = new Real1dBinMapper<T>(dataMin, dataMax, 256, false);
    Histogram1d<T> histogram = new Histogram1d<T>(iterable, mapper);
    if (bundle == null) {
      bundle = new HistogramBundle(histogram);
    } else {
      bundle.setHistogram(histogram);
    }
    bundle.setDataMinMax(dataMin, dataMax);
    // bundle.setLineSlopeIntercept(1, 0);
    log.debug("computeDataMinMax: dataMin=" + dataMin + ", dataMax=" + dataMax);
     // NB: ideally we would force a widget refresh here so the new histogram (and the
     // min/max fields) show up, but none of the obvious approaches work, because the
     // HistogramBundle reference itself is unchanged (only its internals are): neither
     // InteractiveCommand.update()/setValue() on the "bundle" input, nor toggling the
     // module info's visibility, nor re-running initialize()/callback() on the
     // "bundle" ModuleItem triggers a refresh.
  }
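
Stripped of the ImageJ service, logging, and widget plumbing, the histogram construction above reduces to a few lines. A minimal sketch, using a 256-bin mapper over a caller-supplied range (the class and method names here are illustrative):

import net.imglib2.RandomAccessibleInterval;
import net.imglib2.histogram.BinMapper1d;
import net.imglib2.histogram.Histogram1d;
import net.imglib2.histogram.Real1dBinMapper;
import net.imglib2.type.numeric.RealType;
import net.imglib2.view.Views;

public class HistogramSketch {
  /** Builds a 256-bin histogram of img over [dataMin, dataMax]. */
  public static <T extends RealType<T>> Histogram1d<T> histogram256(
      final RandomAccessibleInterval<T> img, final double dataMin, final double dataMax) {
    final BinMapper1d<T> mapper = new Real1dBinMapper<T>(dataMin, dataMax, 256, false);
    return new Histogram1d<T>(Views.iterable(img), mapper);
  }
}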
Code example #9
 @Override
 public RandomAccessibleInterval<T> compute(
     final RandomAccessibleInterval<T> input, final RandomAccessibleInterval<T> output) {
   final StructuringElementCursor<T> inStructure =
       new StructuringElementCursor<T>(Views.extend(input, m_factory).randomAccess(), m_struc);
   final Cursor<T> out = Views.iterable(output).localizingCursor();
   double m;
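    // gray-scale erosion: for each output position, write the minimum of the input
    // over the structuring element m_struc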
   while (out.hasNext()) {
     out.next();
     inStructure.relocate(out);
     inStructure.next();
     m = inStructure.get().getRealDouble();
     while (inStructure.hasNext()) {
       inStructure.next();
       m = Math.min(m, inStructure.get().getRealDouble());
     }
     out.get().setReal(m);
   }
   return output;
 }
Code example #10
  private static void adjustForOSEM(
      final HashMap<ViewId, RandomAccessibleInterval<FloatType>> weights,
      final WeightType weightType,
      final double osemspeedup) {
    if (osemspeedup == 1.0) return;

    if (weightType == WeightType.PRECOMPUTED_WEIGHTS
        || weightType == WeightType.WEIGHTS_ONLY
        || weightType == WeightType.LOAD_WEIGHTS) {
      for (final RandomAccessibleInterval<FloatType> w : weights.values()) {
         // the individual contribution is never higher than 1
         for (final FloatType f : Views.iterable(w))
           f.set(Math.min(1, f.get() * (float) osemspeedup));
      }
    } else if (weightType == WeightType.NO_WEIGHTS) {
      for (final RandomAccessibleInterval<FloatType> w : weights.values()) {
        final RandomAccess<FloatType> r = w.randomAccess();
        final long[] min = new long[w.numDimensions()];
        w.min(min);
        r.setPosition(min);
         // the individual contribution is never higher than 1
         r.get().set(Math.min(1, r.get().get() * (float) osemspeedup));
      }
    } else if (weightType == WeightType.VIRTUAL_WEIGHTS) {
      for (final RandomAccessibleInterval<FloatType> w : weights.values())
        ((NormalizingRandomAccessibleInterval<FloatType>) w).setOSEMspeedup(osemspeedup);
    } else {
      throw new RuntimeException(
          "Weight Type: "
              + weightType.name()
              + " not supported in ProcessForDeconvolution.adjustForOSEM()");
    }
  }
Code example #11
   /**
    * Initializes the brightness (min/max display range) of the current source from a histogram
    * of the middle z-slice of its coarsest mipmap level.
    *
    * @param cumulativeMinCutoff fraction of the cumulative histogram mapped to the display minimum
    * @param cumulativeMaxCutoff fraction of the cumulative histogram mapped to the display maximum
    * @param state the current viewer state
    * @param setupAssignments the setup assignments whose first min/max group is adjusted
    */
  public static void initBrightness(
      final double cumulativeMinCutoff,
      final double cumulativeMaxCutoff,
      final ViewerState state,
      final SetupAssignments setupAssignments) {
    final Source<?> source = state.getSources().get(state.getCurrentSource()).getSpimSource();
    final int timepoint = state.getCurrentTimepoint();
    if (!source.isPresent(timepoint)) return;
    if (!UnsignedShortType.class.isInstance(source.getType())) return;
    @SuppressWarnings("unchecked")
    final RandomAccessibleInterval<UnsignedShortType> img =
        (RandomAccessibleInterval<UnsignedShortType>)
            source.getSource(timepoint, source.getNumMipmapLevels() - 1);
    final long z = (img.min(2) + img.max(2) + 1) / 2;

    final int numBins = 6535;
    final Histogram1d<UnsignedShortType> histogram =
        new Histogram1d<UnsignedShortType>(
            Views.iterable(Views.hyperSlice(img, 2, z)),
            new Real1dBinMapper<UnsignedShortType>(0, 65535, numBins, false));
    final DiscreteFrequencyDistribution dfd = histogram.dfd();
    final long[] bin = new long[] {0};
    double cumulative = 0;
    int i = 0;
    for (; i < numBins && cumulative < cumulativeMinCutoff; ++i) {
      bin[0] = i;
      cumulative += dfd.relativeFrequency(bin);
    }
    final int min = i * 65535 / numBins;
    for (; i < numBins && cumulative < cumulativeMaxCutoff; ++i) {
      bin[0] = i;
      cumulative += dfd.relativeFrequency(bin);
    }
    final int max = i * 65535 / numBins;
    final MinMaxGroup minmax = setupAssignments.getMinMaxGroups().get(0);
    minmax.getMinBoundedValue().setCurrentValue(min);
    minmax.getMaxBoundedValue().setCurrentValue(max);
  }
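
The cumulative-cutoff search above can be factored into a small helper that reuses only the calls already shown (histogram.dfd() and relativeFrequency()); a sketch, with the class and method names being my own:

import net.imglib2.histogram.DiscreteFrequencyDistribution;
import net.imglib2.histogram.Histogram1d;

public class HistogramCutoffs {
  /** Returns the first bin index at which the cumulative relative frequency reaches cutoff. */
  public static int cumulativeCutoffBin(
      final Histogram1d<?> histogram, final int numBins, final double cutoff) {
    final DiscreteFrequencyDistribution dfd = histogram.dfd();
    final long[] bin = new long[1];
    double cumulative = 0;
    int i = 0;
    for (; i < numBins && cumulative < cutoff; ++i) {
      bin[0] = i;
      cumulative += dfd.relativeFrequency(bin);
    }
    return i;
  }
}

Calling this once per cutoff yields the same min and max bins as the two in-line loops above, since the cumulative value is simply a prefix sum over the bins.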
Code example #12
  public static <T extends Type<T> & Comparable<T>> int findLocalMaximaNeighborhood2(
      final RandomAccessibleInterval<T> img) {
    // final ArrayList< Point > maxima = new ArrayList< Point >();
    int nMaxima = 0;

    final Cursor<T> center =
        Views.iterable(Views.interval(img, Intervals.expand(img, -1))).localizingCursor();
    final LocalNeighborhood2<T> neighborhood = new LocalNeighborhood2<T>(img, center);
    final LocalNeighborhoodCursor2<T> nc = neighborhood.cursor();
    A:
    while (center.hasNext()) {
      final T t = center.next();
      neighborhood.updateCenter(center);
      nc.reset();
      while (nc.hasNext()) {
        final T n = nc.next();
        if (n.compareTo(t) > 0) continue A;
      }
      // maxima.add( new Point( center ) );
      ++nMaxima;
    }
    return nMaxima;
  }
Code example #13
  protected void displayWeights(
      final double osemspeedup,
      final ArrayList<RandomAccessibleInterval<FloatType>> weights,
      final RandomAccessibleInterval<FloatType> overlapImg,
      final ImgFactory<FloatType> imgFactory) {
    final DisplayImage d = new DisplayImage();

    d.exportImage(overlapImg, bb, "Number of views per pixel");

    final Img<FloatType> w = imgFactory.create(overlapImg, new FloatType());
    final Img<FloatType> wosem = imgFactory.create(overlapImg, new FloatType());

    // split up into many parts for multithreading
    final Vector<ImagePortion> portions =
        FusionHelper.divideIntoPortions(
            Views.iterable(weights.get(0)).size(), Threads.numThreads() * 2);

    // set up executor service
    final ExecutorService taskExecutor = Executors.newFixedThreadPool(Threads.numThreads());
    final ArrayList<Callable<String>> tasks = new ArrayList<Callable<String>>();

    for (final ImagePortion portion : portions) {
      tasks.add(
          new Callable<String>() {
            @Override
            public String call() throws Exception {
              final ArrayList<Cursor<FloatType>> cursors = new ArrayList<Cursor<FloatType>>();
              final Cursor<FloatType> sum = w.cursor();
              final Cursor<FloatType> sumOsem = wosem.cursor();

              for (final RandomAccessibleInterval<FloatType> imgW : weights) {
                final Cursor<FloatType> c = Views.iterable(imgW).cursor();
                c.jumpFwd(portion.getStartPosition());
                cursors.add(c);
              }

              sum.jumpFwd(portion.getStartPosition());
              sumOsem.jumpFwd(portion.getStartPosition());

              for (long j = 0; j < portion.getLoopSize(); ++j) {
                double sumW = 0;
                double sumOsemW = 0;

                for (final Cursor<FloatType> c : cursors) {
                   final float weight = c.next().get();
                   sumW += weight;
                   sumOsemW += Math.min(1, weight * osemspeedup);
                }

                sum.next().set((float) sumW);
                sumOsem.next().set((float) sumOsemW);
              }

              return "done.";
            }
          });
    }

    // run threads
    try {
      // invokeAll() returns when all tasks are complete
      taskExecutor.invokeAll(tasks);
    } catch (final Exception e) {
      IOFunctions.println("Failed to compute weight normalization for deconvolution: " + e);
      e.printStackTrace();
      return;
    }

    taskExecutor.shutdown();

    d.exportImage(w, bb, "Sum of weights per pixel");
    d.exportImage(wosem, bb, "OSEM=" + osemspeedup + ", sum of weights per pixel");
  }
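
The portion/jumpFwd scheme used above is a general way to parallelize a flat iteration over Views.iterable(...). A self-contained sketch of the same pattern without the FusionHelper/ImagePortion/Threads helpers (the chunking is done by hand here; class and method names are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import net.imglib2.Cursor;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.type.numeric.real.FloatType;
import net.imglib2.view.Views;

public class ParallelSumSketch {
  /** Sums all pixels of img, splitting the flat iteration into one chunk per thread. */
  public static double sum(final RandomAccessibleInterval<FloatType> img, final int nThreads)
      throws InterruptedException, ExecutionException {
    final long size = Views.iterable(img).size();
    final long chunk = (size + nThreads - 1) / nThreads;

    final List<Callable<Double>> tasks = new ArrayList<Callable<Double>>();
    for (int t = 0; t < nThreads; ++t) {
      final long start = t * chunk;
      final long steps = Math.min(chunk, size - start);
      if (steps <= 0) break;
      tasks.add(
          new Callable<Double>() {
            @Override
            public Double call() {
              // each task creates its own cursor and jumps ahead to the start of its chunk
              final Cursor<FloatType> c = Views.iterable(img).cursor();
              c.jumpFwd(start);
              double s = 0;
              for (long j = 0; j < steps; ++j) s += c.next().get();
              return s;
            }
          });
    }

    final ExecutorService taskExecutor = Executors.newFixedThreadPool(nThreads);
    try {
      double total = 0;
      for (final Future<Double> f : taskExecutor.invokeAll(tasks)) total += f.get();
      return total;
    } finally {
      taskExecutor.shutdown();
    }
  }
}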
Code example #14
   /**
    * Fuses one stack, i.e. all angles/illuminations for one timepoint and channel.
    *
    * @param timepoint the timepoint to fuse
    * @param channel the channel to fuse
    * @return true if the fusion succeeded, false otherwise
    */
  public boolean fuseStacksAndGetPSFs(
      final TimePoint timepoint,
      final Channel channel,
      final ImgFactory<FloatType> imgFactory,
      final int osemIndex,
      double osemspeedup,
      WeightType weightType,
      final HashMap<Channel, ChannelPSF> extractPSFLabels,
      final long[] psfSize,
      final HashMap<Channel, ArrayList<Pair<Pair<Angle, Illumination>, String>>> psfFiles,
      final boolean transformLoadedPSFs) {
    // TODO: get rid of this hack
    if (files != null) {
      weightType = WeightType.LOAD_WEIGHTS;
      IOFunctions.println("WARNING: LOADING WEIGHTS FROM IMAGES, files.length()=" + files.length);
    }

    // get all views that are fused for this timepoint & channel
    this.viewDescriptions =
        FusionHelper.assembleInputData(spimData, timepoint, channel, viewIdsToProcess);

    if (this.viewDescriptions.size() == 0) return false;

    this.imgs = new HashMap<ViewId, RandomAccessibleInterval<FloatType>>();
    this.weights = new HashMap<ViewId, RandomAccessibleInterval<FloatType>>();

    final Img<FloatType> overlapImg;

    if (weightType == WeightType.WEIGHTS_ONLY)
      overlapImg = imgFactory.create(bb.getDimensions(), new FloatType());
    else overlapImg = null;

    final boolean extractPSFs =
        (extractPSFLabels != null) && (extractPSFLabels.get(channel).getLabel() != null);
    final boolean loadPSFs = (psfFiles != null);

    if (extractPSFs) ePSF = new ExtractPSF<FloatType>();
    else if (loadPSFs) ePSF = loadPSFs(channel, viewDescriptions, psfFiles, transformLoadedPSFs);
    else {
      ePSF = assignOtherChannel(channel, extractPSFLabels);
    }

    if (ePSF == null) return false;

    // remember the extracted or loaded PSFs
    extractPSFLabels.get(channel).setExtractPSFInstance(ePSF);

    // we will need to run some batches until all is fused
    for (int i = 0; i < viewDescriptions.size(); ++i) {
      final ViewDescription vd = viewDescriptions.get(i);

      IOFunctions.println(
          "Transforming view "
              + i
              + " of "
              + (viewDescriptions.size() - 1)
              + " (viewsetup="
              + vd.getViewSetupId()
              + ", tp="
              + vd.getTimePointId()
              + ")");
      IOFunctions.println(
          "("
              + new Date(System.currentTimeMillis())
              + "): Reserving memory for transformed & weight image.");

      // creating the output
      RandomAccessibleInterval<FloatType> transformedImg; // might be null if WEIGHTS_ONLY
       // never null (except LOAD_WEIGHTS, which is not implemented yet)
       final RandomAccessibleInterval<FloatType> weightImg;

      if (weightType == WeightType.WEIGHTS_ONLY) transformedImg = overlapImg;
      else transformedImg = imgFactory.create(bb.getDimensions(), new FloatType());

      IOFunctions.println(
          "("
              + new Date(System.currentTimeMillis())
              + "): Transformed image factory: "
              + imgFactory.getClass().getSimpleName());

      // loading the input if necessary
      final RandomAccessibleInterval<FloatType> img;

      if (weightType == WeightType.WEIGHTS_ONLY && !extractPSFs) {
        img = null;
      } else {
        IOFunctions.println("(" + new Date(System.currentTimeMillis()) + "): Loading image.");
        img = ProcessFusion.getImage(new FloatType(), spimData, vd, true);

        if (Img.class.isInstance(img))
          IOFunctions.println(
              "("
                  + new Date(System.currentTimeMillis())
                  + "): Input image factory: "
                  + ((Img<FloatType>) img).factory().getClass().getSimpleName());
      }

      // initializing weights
      IOFunctions.println(
          "("
              + new Date(System.currentTimeMillis())
              + "): Initializing transformation & weights: "
              + weightType.name());

      spimData.getViewRegistrations().getViewRegistration(vd).updateModel();
      final AffineTransform3D transform =
          spimData.getViewRegistrations().getViewRegistration(vd).getModel();
      final long[] offset = new long[] {bb.min(0), bb.min(1), bb.min(2)};

      if (weightType == WeightType.PRECOMPUTED_WEIGHTS || weightType == WeightType.WEIGHTS_ONLY)
        weightImg = imgFactory.create(bb.getDimensions(), new FloatType());
      else if (weightType == WeightType.NO_WEIGHTS)
        weightImg =
            Views.interval(
                new ConstantRandomAccessible<FloatType>(
                    new FloatType(1), transformedImg.numDimensions()),
                transformedImg);
      else if (weightType == WeightType.VIRTUAL_WEIGHTS) {
        final Blending blending = getBlending(img, blendingBorder, blendingRange, vd);

        weightImg =
            new TransformedRealRandomAccessibleInterval<FloatType>(
                blending, new FloatType(), transformedImg, transform, offset);
       } else // if ( weightType == WeightType.LOAD_WEIGHTS )
      {
        IOFunctions.println("WARNING: LOADING WEIGHTS FROM: '" + new File(files[i]) + "'");
        ImagePlus imp = StackImgLoaderIJ.open(new File(files[i]));
        weightImg = imgFactory.create(bb.getDimensions(), new FloatType());
        StackImgLoaderIJ.imagePlus2ImgLib2Img(imp, (Img<FloatType>) weightImg, false);
        imp.close();
        if (debugImport) {
          imp = ImageJFunctions.show(weightImg);
          imp.setTitle("ViewSetup " + vd.getViewSetupId() + " Timepoint " + vd.getTimePointId());
        }
      }

      // split up into many parts for multithreading
      final Vector<ImagePortion> portions =
          FusionHelper.divideIntoPortions(
              Views.iterable(transformedImg).size(), Threads.numThreads() * 4);

      // set up executor service
      final ExecutorService taskExecutor = Executors.newFixedThreadPool(Threads.numThreads());
      final ArrayList<Callable<String>> tasks = new ArrayList<Callable<String>>();

      IOFunctions.println(
          "("
              + new Date(System.currentTimeMillis())
              + "): Transforming image & computing weights.");

      for (final ImagePortion portion : portions) {
        if (weightType == WeightType.WEIGHTS_ONLY) {
          final Interval imgInterval =
              new FinalInterval(
                  ViewSetupUtils.getSizeOrLoad(
                      vd.getViewSetup(),
                      vd.getTimePoint(),
                      spimData.getSequenceDescription().getImgLoader()));
          final Blending blending = getBlending(imgInterval, blendingBorder, blendingRange, vd);

          tasks.add(
              new TransformWeights(
                  portion, imgInterval, blending, transform, overlapImg, weightImg, offset));
        } else if (weightType == WeightType.PRECOMPUTED_WEIGHTS) {
          final Blending blending = getBlending(img, blendingBorder, blendingRange, vd);

          tasks.add(
              new TransformInputAndWeights(
                  portion, img, blending, transform, transformedImg, weightImg, offset));
        } else if (weightType == WeightType.NO_WEIGHTS
            || weightType == WeightType.VIRTUAL_WEIGHTS
            || weightType == WeightType.LOAD_WEIGHTS) {
          tasks.add(new TransformInput(portion, img, transform, transformedImg, offset));
        } else {
          throw new RuntimeException(weightType.name() + " not implemented yet.");
        }
      }

      try {
        // invokeAll() returns when all tasks are complete
        taskExecutor.invokeAll(tasks);
      } catch (final InterruptedException e) {
        IOFunctions.println("Failed to compute fusion: " + e);
        e.printStackTrace();
        return false;
      }

      taskExecutor.shutdown();

      // extract PSFs if wanted
      if (extractPSFs) {
        final ArrayList<double[]> llist =
            getLocationsOfCorrespondingBeads(
                timepoint, vd, extractPSFLabels.get(channel).getLabel());

        IOFunctions.println(
            "("
                + new Date(System.currentTimeMillis())
                + "): Extracting PSF for viewsetup "
                + vd.getViewSetupId()
                + " using label '"
                + extractPSFLabels.get(channel).getLabel()
                + "'"
                + " ("
                + llist.size()
                + " corresponding detections available)");

        ePSF.extractNextImg(img, vd, transform, llist, psfSize);
      }

      if (weightType != WeightType.WEIGHTS_ONLY) imgs.put(vd, transformedImg);
      weights.put(vd, weightImg);
    }

    // normalize the weights
    final ArrayList<RandomAccessibleInterval<FloatType>> weightsSorted =
        new ArrayList<RandomAccessibleInterval<FloatType>>();

    for (final ViewDescription vd : viewDescriptions) weightsSorted.add(weights.get(vd));

    IOFunctions.println(
        "("
            + new Date(System.currentTimeMillis())
            + "): Computing weight normalization for deconvolution.");

    final WeightNormalizer wn;

    if (weightType == WeightType.WEIGHTS_ONLY
        || weightType == WeightType.PRECOMPUTED_WEIGHTS
        || weightType == WeightType.LOAD_WEIGHTS) wn = new WeightNormalizer(weightsSorted);
    else if (weightType == WeightType.VIRTUAL_WEIGHTS)
      wn = new WeightNormalizer(weightsSorted, imgFactory);
     else // if ( weightType == WeightType.NO_WEIGHTS )
       wn = null;

    if (wn != null && !wn.process()) return false;

    // put the potentially modified weights back
    for (int i = 0; i < viewDescriptions.size(); ++i)
      weights.put(viewDescriptions.get(i), weightsSorted.get(i));

     if (wn != null) {
       this.minOverlappingViews = wn.getMinOverlappingViews();
       this.avgOverlappingViews = wn.getAvgOverlappingViews();
     }

    IOFunctions.println(
        "("
            + new Date(System.currentTimeMillis())
            + "): Minimal number of overlapping views: "
            + getMinOverlappingViews()
            + ", using "
            + (this.minOverlappingViews = Math.max(1, this.minOverlappingViews)));
    IOFunctions.println(
        "("
            + new Date(System.currentTimeMillis())
            + "): Average number of overlapping views: "
            + getAvgOverlappingViews()
            + ", using "
            + (this.avgOverlappingViews = Math.max(1, this.avgOverlappingViews)));

    if (osemIndex == 1) osemspeedup = getMinOverlappingViews();
    else if (osemIndex == 2) osemspeedup = getAvgOverlappingViews();

    IOFunctions.println(
        "("
            + new Date(System.currentTimeMillis())
            + "): Adjusting for OSEM speedup = "
            + osemspeedup);

    if (weightType == WeightType.WEIGHTS_ONLY)
      displayWeights(osemspeedup, weightsSorted, overlapImg, imgFactory);
    else adjustForOSEM(weights, weightType, osemspeedup);

    IOFunctions.println(
        "("
            + new Date(System.currentTimeMillis())
            + "): Finished precomputations for deconvolution.");

    return true;
  }