public ImagePlusLocalizablePlaneCursor(
      final ImagePlusContainer<T, ?> container, final Image<T> image, final T type) {
    super(container, image, type);

    this.width = image.getDimension(0);
    this.height = image.getDimension(1);
    this.depth = image.getDimension(2);
  }
 public AbstractSortedGrayLevelIterator(final Image<T> image) {
   this.image = image;
   this.position = image.createPositionArray();
   this.n = image.getNumPixels();
   this.maxIdx = this.n - 1;
   this.dimensions = image.getDimensions();
   createInternalCursor();
   this.sortedLinIdx = getLinearIndexArraySortedByGrayLevel();
   reset();
 }
Example #3
 /**
  * Constructor
  *
  * @param input the intensity image to be labeled
  * @param scale the minimum distance between maxima of objects. Less technically, this should be
  *     the diameter of the smallest object.
  * @param sigma1 the standard deviation for the larger smoothing. The difference between sigma1
  *     and sigma2 should be roughly the width of the desired edge in the DoG image. A larger
  *     difference will obscure small, faint edges.
  * @param sigma2 the standard deviation for the smaller smoothing. This should be on the order of
  *     the largest insignificant feature in the image.
  * @param names - an iterator that generates names of type L for the labels. The iterator will
  *     waste the last name taken on the background label. You can use
  *     AllConnectedComponents.getIntegerNames() as your name generator if you don't care about
  *     names.
  */
 public GradientWatershed(
     Image<T> input, double[] scale, double[] sigma1, double[] sigma2, Iterator<L> names) {
   this.input = input;
   this.scale = scale;
   this.sigma1 = sigma1;
   this.sigma2 = sigma2;
   structuringElement = AllConnectedComponents.getStructuringElement(input.getNumDimensions());
   this.names = names;
   labelingFactory =
       new ImageFactory<LabelingType<L>>(new LabelingType<L>(), input.getContainerFactory());
 }
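A minimal, hypothetical usage sketch for the constructor above (not taken from this listing): the type-parameter order <T, L>, the start argument of AllConnectedComponents.getIntegerNames(), and the concrete scale/sigma values are assumptions chosen for illustration.
  public static GradientWatershed<FloatType, Integer> buildWatershed(final Image<FloatType> img) {
    // placeholder values; checkInput() later in this listing requires sigma1[i] > sigma2[i]
    final double[] scale = new double[] {10, 10}; // ~diameter of the smallest object
    final double[] sigma1 = new double[] {4, 4}; // larger smoothing
    final double[] sigma2 = new double[] {1, 1}; // smaller smoothing
    return new GradientWatershed<FloatType, Integer>(
        img, scale, sigma1, sigma2, AllConnectedComponents.getIntegerNames(0));
  }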
  protected <C extends Comparable<C> & Type<C>> C max(final Image<C> image) {

    C max = image.createType();

    // create a cursor for the image (the order does not matter)
    Cursor<C> cursor = image.createCursor();

    // initialize max with the first image value
    max.set(cursor.next());

    for (C type : cursor) {
      if (type.compareTo(max) > 0) max.set(type);
    }

    // free the cursor before returning, as the other examples in this listing do
    cursor.close();

    return max;
  }
Example #5
  public ContainerImpl(final ContainerFactory factory, int[] dim) {
    this.numDimensions = dim.length;

    this.numPixels = getNumPixels(dim);

    this.dim = dim.clone();
    this.factory = factory;
    this.id = Image.createUniqueId();
  }
Example #6
  protected void computeAdvanced(final long startPos, final long loopSize) {
    final LocalizableByDimCursor<S> cursorIn = image.createLocalizableByDimCursor();
    final LocalizableCursor<T> cursorOut = output.createLocalizableCursor();

    // move to the starting position of the current thread
    cursorOut.fwd(startPos);

    // do as many pixels as wanted by this thread
    for (long j = 0; j < loopSize; ++j) {
      cursorOut.fwd();
      cursorIn.setPosition(cursorOut);

      converter.convert(cursorIn.getType(), cursorOut.getType());
    }

    cursorIn.close();
    cursorOut.close();
  }
Example #7
 /** Obtains planar access instance backing the given image, if any. */
 @SuppressWarnings("unchecked")
 public static PlanarAccess<ArrayDataAccess<?>> getPlanarAccess(Image<?> im) {
   PlanarAccess<ArrayDataAccess<?>> planarAccess = null;
   final Container<?> container = im.getContainer();
   if (container instanceof PlanarAccess<?>) {
     planarAccess = (PlanarAccess<ArrayDataAccess<?>>) container;
   }
   return planarAccess;
 }
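A small, hypothetical helper built on getPlanarAccess above: it simply reports whether the image's container can hand out whole planes (e.g. a PlanarContainer), which is how the reader code later in this listing chooses between the cursor-based and plane-based population paths.
  public static boolean hasPlanarAccess(final Image<?> im) {
    // non-null only when the container implements PlanarAccess (see getPlanarAccess above)
    return getPlanarAccess(im) != null;
  }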
Example #8
  @Override
  public boolean process() {
    final long startTime = System.currentTimeMillis();

    final long imageSize = image.getNumPixels();

    final AtomicInteger ai = new AtomicInteger(0);
    final Thread[] threads = SimpleMultiThreading.newThreads(getNumThreads());

    final Vector<Chunk> threadChunks = SimpleMultiThreading.divideIntoChunks(imageSize, numThreads);

    final boolean isCompatible =
        image.getContainer().compareStorageContainerCompatibility(output.getContainer());

    for (int ithread = 0; ithread < threads.length; ++ithread)
      threads[ithread] =
          new Thread(
              new Runnable() {
                public void run() {
                  // Thread ID
                  final int myNumber = ai.getAndIncrement();

                  // get chunk of pixels to process
                  final Chunk myChunk = threadChunks.get(myNumber);

                  // check if all container types are compatible so that we can use simple iterators
                  // (we assume transitivity here)
                  if (isCompatible) {
                    // we can simply use iterators
                    computeSimple(myChunk.getStartPosition(), myChunk.getLoopSize());
                  } else {
                    // we need a combination of Localizable and LocalizableByDim
                    computeAdvanced(myChunk.getStartPosition(), myChunk.getLoopSize());
                  }
                }
              });

    SimpleMultiThreading.startAndJoin(threads);

    processingTime = System.currentTimeMillis() - startTime;

    return true;
  }
Example #9
 @Override
 public boolean checkInput() {
   if (errorMessage.length() > 0) {
     return false;
   } else if (image == null) {
     errorMessage = "ImageCalculator: [Image<S> image1] is null.";
     return false;
   } else if (output == null) {
     errorMessage = "ImageCalculator: [Image<T> output] is null.";
     return false;
   } else if (converter == null) {
     errorMessage = "ImageCalculator: [Converter<S,T>] is null.";
     return false;
   } else if (!image.getContainer().compareStorageContainerDimensions(output.getContainer())) {
     errorMessage =
         "ImageCalculator: Images have different dimensions, not supported:"
             + " Image: "
             + Util.printCoordinates(image.getDimensions())
             + " Output: "
             + Util.printCoordinates(output.getDimensions());
     return false;
   } else return true;
 }
Example #10
  @Override
  public boolean process() {
    floatImage = null;
    if (output == null) {
      output = new Labeling<L>(labelingFactory, input.getDimensions(), null);
    } else {
      /*
       * Initialize the output to all background
       */
      LocalizableCursor<LabelingType<L>> c = output.createLocalizableCursor();
      List<L> background = c.getType().intern(new ArrayList<L>());
      for (LabelingType<L> t : c) {
        t.setLabeling(background);
      }
      c.close();
    }
    /*
     * Get the smoothed image.
     */
    Image<FloatType> kernel =
        FourierConvolution.createGaussianKernel(input.getContainerFactory(), scale);
    FourierConvolution<FloatType, FloatType> convolution =
        new FourierConvolution<FloatType, FloatType>(getFloatImage(), kernel);
    if (!convolution.process()) return false;
    Image<FloatType> smoothed = convolution.getResult();

    /*
     * Find the local maxima and label them individually.
     */
    PickImagePeaks<FloatType> peakPicker = new PickImagePeaks<FloatType>(smoothed);
    peakPicker.setSuppression(scale);
    peakPicker.process();
    Labeling<L> seeds = output.createNewLabeling();
    LocalizableByDimCursor<LabelingType<L>> lc = seeds.createLocalizableByDimCursor();
    LocalizableByDimCursor<FloatType> imageCursor = smoothed.createLocalizableByDimCursor();
    int[] dimensions = input.getDimensions();
    for (int[] peak : peakPicker.getPeakList()) {
      if (!filterPeak(imageCursor, peak, dimensions, false)) continue;
      lc.setPosition(peak);
      lc.getType().setLabel(names.next());
    }
    imageCursor.close();
    /*
     * Find the local minima and label them all the same.
     */
    List<L> background = lc.getType().intern(names.next());
    Converter<FloatType, FloatType> invert =
        new Converter<FloatType, FloatType>() {

          @Override
          public void convert(FloatType input, FloatType output) {
            output.setReal(-input.getRealFloat());
          }
        };
    ImageConverter<FloatType, FloatType> invSmoothed =
        new ImageConverter<FloatType, FloatType>(smoothed, smoothed, invert);
    invSmoothed.process();
    peakPicker = new PickImagePeaks<FloatType>(smoothed);
    peakPicker.setSuppression(scale);
    peakPicker.process();
    imageCursor = smoothed.createLocalizableByDimCursor();
    for (int[] peak : peakPicker.getPeakList()) {
      if (!filterPeak(imageCursor, peak, dimensions, true)) continue;
      lc.setPosition(peak);
      lc.getType().setLabeling(background);
    }
    lc.close();
    imageCursor.close();
    smoothed = null;
    invSmoothed = null;
    Image<FloatType> gradientImage = getGradientImage();
    if (gradientImage == null) return false;
    /*
     * Run the seeded watershed on the image.
     */
    Watershed.seededWatershed(gradientImage, seeds, structuringElement, output);
    return true;
  }
Example #11
 protected ImageFactory<FloatType> getFloatFactory() {
   if (floatFactory == null) {
     floatFactory = new ImageFactory<FloatType>(new FloatType(), input.getContainerFactory());
   }
   return floatFactory;
 }
Example #12
 @Override
 public boolean checkInput() {
   if (error_message.length() > 0) return false;
   if (input == null) {
     error_message = "The input image is null.";
     return false;
   }
   if (scale == null) {
     error_message = "The scale is null.";
     return false;
   }
   if (sigma1 == null) {
     error_message = "The first smoothing standard deviation (sigma1) is null.";
     return false;
   }
   if (sigma2 == null) {
     error_message = "The second smoothing standard deviation (sigma2) is null.";
     return false;
   }
   if (structuringElement == null) {
     error_message = "The structuring element is null.";
     return false;
   }
    if (names == null) {
      error_message = "The names iterator is null.";
      return false;
    }
   if (!checkDimensions(scale)) {
     error_message = "The dimensions of the scale do not match those of the image";
     return false;
   }
   if (!checkDimensions(sigma1)) {
     error_message = "The dimensions of sigma1 do not match those of the image";
     return false;
   }
   if (!checkDimensions(sigma2)) {
     error_message = "The dimensions of sigma2 do not match those of the image";
     return false;
   }
   for (int[] coord : structuringElement) {
     if (coord == null) {
       error_message = "One of the coordinates of the structuring element is null.";
       return false;
     }
     if (!checkDimensions(coord)) {
        error_message =
            "The dimensions of one of the coordinates of the structuring element do not match those of the image.";
       return false;
     }
   }
   if (wantsToQuantize && (numQuanta < 2)) {
     error_message =
         String.format(
             "The number of quanta is %d, but must be at least 2 (and ideally > 20).", numQuanta);
     return false;
   }
   for (int i = 0; i < sigma1.length; i++) {
     if (sigma1[i] <= sigma2[i]) {
       error_message =
           String.format(
               "All values of sigma1 should be greater than sigma2. For dimension %d, sigma1=%f, sigma2=%f",
               i, sigma1[i], sigma2[i]);
       return false;
     }
   }
   if (output != null) {
     int[] dimensions = output.getDimensions();
     if (!checkDimensions(dimensions)) {
       error_message = "The labeling container does not have the correct number of dimensions";
       return false;
     }
     for (int i = 0; i < dimensions.length; i++) {
       if (dimensions[i] != input.getDimension(i)) {
         error_message =
             String.format(
                 "The labeling container is not the same size as the image: dimension %d, labeling = %d, image = %d",
                 i, dimensions[i], input.getDimension(i));
         return false;
       }
     }
   }
   return true;
 }
Example #13
 protected boolean checkDimensions(double[] array) {
   return array.length == input.getNumDimensions();
 }
  /**
   * Fuse one slice/volume (one channel).
   *
   * @param output - same type as the ImagePlus input
   * @param input - FloatType, because of the interpolation that needs to be done
   * @param offset - the offset added to each output position before applying the inverse transform
   * @param transform - the transformation
   * @param factory - the interpolator factory used to sample the input image
   */
  public static <T extends RealType<T>> void fuseChannel(
      final Image<T> output,
      final Image<FloatType> input,
      final float[] offset,
      final InvertibleCoordinateTransform transform,
      final InterpolatorFactory<FloatType> factory) {
    final int dims = output.getNumDimensions();
    long imageSize = output.getDimension(0);

    for (int d = 1; d < output.getNumDimensions(); ++d) imageSize *= output.getDimension(d);

    // run multithreaded
    final AtomicInteger ai = new AtomicInteger(0);
    final Thread[] threads = SimpleMultiThreading.newThreads();

    final Vector<Chunk> threadChunks =
        SimpleMultiThreading.divideIntoChunks(imageSize, threads.length);

    for (int ithread = 0; ithread < threads.length; ++ithread)
      threads[ithread] =
          new Thread(
              new Runnable() {
                public void run() {
                  // Thread ID
                  final int myNumber = ai.getAndIncrement();

                  // get chunk of pixels to process
                  final Chunk myChunk = threadChunks.get(myNumber);
                  final long startPos = myChunk.getStartPosition();
                  final long loopSize = myChunk.getLoopSize();

                  final LocalizableCursor<T> out = output.createLocalizableCursor();
                  final Interpolator<FloatType> in = input.createInterpolator(factory);

                  final float[] tmp = new float[input.getNumDimensions()];

                  try {
                    // move to the starting position of the current thread
                    out.fwd(startPos);

                    // do as many pixels as wanted by this thread
                    for (long j = 0; j < loopSize; ++j) {
                      out.fwd();

                      for (int d = 0; d < dims; ++d) tmp[d] = out.getPosition(d) + offset[d];

                      transform.applyInverseInPlace(tmp);

                      in.setPosition(tmp);
                      out.getType().setReal(in.getType().get());
                    }
                  } catch (NoninvertibleModelException e) {
                    IJ.log("Cannot invert model, qutting.");
                    return;
                  }
                }
              });

    SimpleMultiThreading.startAndJoin(threads);

    /*
    final LocalizableCursor<T> out = output.createLocalizableCursor();
    final Interpolator<FloatType> in = input.createInterpolator( factory );

    final float[] tmp = new float[ input.getNumDimensions() ];

    try
    {
    	while ( out.hasNext() )
    	{
    		out.fwd();

    		for ( int d = 0; d < dims; ++d )
    			tmp[ d ] = out.getPosition( d ) + offset[ d ];

    		transform.applyInverseInPlace( tmp );

    		in.setPosition( tmp );
    		out.getType().setReal( in.getType().get() );
    	}
    }
    catch (NoninvertibleModelException e)
    {
    	IJ.log( "Cannot invert model, qutting." );
    	return;
    }
    */
  }
Example #15
 public ImageConverter(
     final Image<S> image, final ImageFactory<T> factory, final Converter<S, T> converter) {
   this(image, createImageFromFactory(factory, image.getDimensions()), converter);
 }
 public SortedGrayLevelIteratorFactory(Image<T> image) {
   isArrayContainer = Array.class.isInstance(image.getContainer());
 }
 @Override
 public void getDimensions(int[] position) {
   image.getDimensions(position);
 }
 @Override
 public int[] getPosition() {
   int[] position = image.createPositionArray();
   getPosition(position);
   return position;
 }
  public static <T extends RealType<T>> CompositeImage createOverlay(
      final T targetType,
      final ArrayList<ImagePlus> images,
      final ArrayList<InvertibleBoundable> models,
      final int dimensionality,
      final int timepoint,
      final InterpolatorFactory<FloatType> factory) {
    final int numImages = images.size();

    // the size of the new image
    final int[] size = new int[dimensionality];
    // the offset relative to the output image which starts with its local coordinates (0,0,0)
    final float[] offset = new float[dimensionality];

    // estimate the boundaries of the output image and the offset for fusion (negative coordinates
    // after transform have to be shifted to 0,0,0)
    estimateBounds(offset, size, images, models, dimensionality);

    // for output
    final ImageFactory<T> f = new ImageFactory<T>(targetType, new ImagePlusContainerFactory());
    // the composite
    final ImageStack stack = new ImageStack(size[0], size[1]);

    int numChannels = 0;

    // loop over all images
    for (int i = 0; i < images.size(); ++i) {
      final ImagePlus imp = images.get(i);

      // loop over all channels
      for (int c = 1; c <= imp.getNChannels(); ++c) {
        final Image<T> out = f.createImage(size);
        fuseChannel(
            out,
            ImageJFunctions.convertFloat(Hyperstack_rearranger.getImageChunk(imp, c, timepoint)),
            offset,
            models.get(i + (timepoint - 1) * numImages),
            factory);
        try {
          final ImagePlus outImp = ((ImagePlusContainer<?, ?>) out.getContainer()).getImagePlus();
          for (int z = 1; z <= out.getDimension(2); ++z)
            stack.addSlice(imp.getTitle(), outImp.getStack().getProcessor(z));
        } catch (ImgLibException e) {
          IJ.log("Output image has no ImageJ type: " + e);
        }

        // count all channels
        ++numChannels;
      }
    }

    // convertXYZCT ...
    ImagePlus result =
        new ImagePlus(
            "overlay " + images.get(0).getTitle() + " ... " + images.get(numImages - 1).getTitle(),
            stack);

    // numchannels, z-slices, timepoints (but right now the order is still XYZCT)
    if (dimensionality == 3) {
      result.setDimensions(size[2], numChannels, 1);
      result = OverlayFusion.switchZCinXYCZT(result);
    } else {
      result.setDimensions(numChannels, 1, 1);
    }

    return new CompositeImage(result, CompositeImage.COMPOSITE);
  }
 @Override
 public int[] createPositionArray() {
   return image.createPositionArray();
 }
Example #21
  @Override
  public boolean process() {

    // 0. Prepare new dimensions

    int downSamplingFactor = settings.downSamplingFactor;
    int[] dimensions = new int[img.getNumDimensions()];
    int[] dsarr = new int[img.getNumDimensions()];
    float[] dwnCalibration = new float[img.getNumDimensions()];
    for (int i = 0; i < 2; i++) {
      dimensions[i] = img.getDimension(i) / downSamplingFactor;
      dsarr[i] = downSamplingFactor;
      dwnCalibration[i] = calibration[i] * downSamplingFactor;
    }
    if (img.getNumDimensions() > 2) {
      // 3D
      float zratio = calibration[2] / calibration[0]; // how much larger the Z spacing is than XY
      int zdownsampling = (int) (downSamplingFactor / zratio); // temper z downsampling
      zdownsampling = Math.max(1, zdownsampling); // but at least 1
      dimensions[2] = img.getDimension(2) / zdownsampling;
      dsarr[2] = zdownsampling;
      dwnCalibration[2] = calibration[2] * zdownsampling;
    }

    // 1. Downsample the image

    Image<T> downsampled = img.createNewImage(dimensions);
    LocalizableCursor<T> dwnCursor = downsampled.createLocalizableCursor();
    LocalizableByDimCursor<T> srcCursor = img.createLocalizableByDimCursor();
    int[] pos = dwnCursor.createPositionArray();

    while (dwnCursor.hasNext()) {
      dwnCursor.fwd();
      dwnCursor.getPosition(pos);

      // Scale up position
      for (int i = 0; i < pos.length; i++) {
        pos[i] = pos[i] * dsarr[i];
      }

      // Pass it to source cursor
      srcCursor.setPosition(pos);

      // Copy pixel data
      dwnCursor.getType().set(srcCursor.getType());
    }
    dwnCursor.close();
    srcCursor.close();

    // 2. Segment downsampled image

    // 2.1. Create settings object
    LogSegmenterSettings logSettings = new LogSegmenterSettings();
    logSettings.expectedRadius = settings.expectedRadius;
    logSettings.threshold = settings.threshold;
    logSettings.doSubPixelLocalization = true;
    logSettings.useMedianFilter = settings.useMedianFilter;

    // 2.2 Instantiate segmenter
    LogSegmenter<T> segmenter = new LogSegmenter<T>();
    segmenter.setTarget(downsampled, dwnCalibration, logSettings);

    // 2.3 Execute segmentation
    if (!segmenter.checkInput() || !segmenter.process()) {
      errorMessage = BASE_ERROR_MESSAGE + segmenter.getErrorMessage();
      return false;
    }

    // 3. Retrieve the results
    spots = segmenter.getResult();

    return true;
  }
 @Override
 public int getNumDimensions() {
   return image.getNumDimensions();
 }
Example #23
  /**
   * Return a difference-of-Gaussian (DoG) image that measures the gradient at a scale defined by
   * the two smoothing standard deviations, sigma1 and sigma2, supplied to the constructor.
   *
   * @return the quantized DoG image, or null if the Fourier convolution fails
   */
  public Image<FloatType> getGradientImage() {
    /*
     * Create the DoG kernel.
     */
    double[][] kernels1d1 = new double[input.getNumDimensions()][];
    double[][] kernels1d2 = new double[input.getNumDimensions()][];
    int[] kernelDimensions = input.createPositionArray();
    int[] offset = input.createPositionArray();
    for (int i = 0; i < kernels1d1.length; i++) {
      kernels1d1[i] = Util.createGaussianKernel1DDouble(sigma1[i], true);
      kernels1d2[i] = Util.createGaussianKernel1DDouble(sigma2[i], true);
      kernelDimensions[i] = kernels1d1[i].length;
      offset[i] = (kernels1d1[i].length - kernels1d2[i].length) / 2;
    }
    Image<FloatType> kernel = getFloatFactory().createImage(kernelDimensions);
    LocalizableCursor<FloatType> kc = kernel.createLocalizableCursor();
    int[] position = input.createPositionArray();
    for (FloatType t : kc) {
      kc.getPosition(position);
      double value1 = 1;
      double value2 = 1;
      for (int i = 0; i < kernels1d1.length; i++) {
        value1 *= kernels1d1[i][position[i]];
        int position2 = position[i] - offset[i];
        if ((position2 >= 0) && (position2 < kernels1d2[i].length)) {
          value2 *= kernels1d2[i][position2];
        } else {
          value2 = 0;
        }
      }
      t.setReal(value1 - value2);
    }
    kc.close();
    /*
     * Apply the kernel to the image.
     */
    FourierConvolution<FloatType, FloatType> convolution =
        new FourierConvolution<FloatType, FloatType>(getFloatImage(), kernel);
    if (!convolution.process()) return null;
    Image<FloatType> result = convolution.getResult();
    /*
     * Quantize the image.
     */
    ComputeMinMax<FloatType> computeMinMax = new ComputeMinMax<FloatType>(result);
    computeMinMax.process();
    final float min = computeMinMax.getMin().get();
    final float max = computeMinMax.getMax().get();
    if (max == min) return result;

    ImageConverter<FloatType, FloatType> quantizer =
        new ImageConverter<FloatType, FloatType>(
            result,
            result.getImageFactory(),
            new Converter<FloatType, FloatType>() {

              @Override
              public void convert(FloatType input, FloatType output) {
                float value = (input.get() - min) / (max - min);
                value = Math.round(value * 100);
                output.set(value);
              }
            });
    quantizer.process();
    return quantizer.getResult();
  }
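For illustration, the same construction reduced to one dimension: a sketch (not part of the class above) that builds a single 1D DoG profile from the two normalized Gaussian kernels, using the same centering offset and subtraction as the n-dimensional kernel loop above.
  public static double[] dogKernel1D(final double sigma1, final double sigma2) {
    final double[] g1 = Util.createGaussianKernel1DDouble(sigma1, true); // wider, normalized
    final double[] g2 = Util.createGaussianKernel1DDouble(sigma2, true); // narrower, normalized
    final int offset = (g1.length - g2.length) / 2; // center the small kernel inside the large one
    final double[] dog = new double[g1.length];
    for (int i = 0; i < g1.length; i++) {
      final int j = i - offset;
      dog[i] = g1[i] - ((j >= 0 && j < g2.length) ? g2[j] : 0);
    }
    return dog;
  }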
Example #24
  /**
   * Reads in an imglib {@link Image} from the given initialized {@link IFormatReader}, using the
   * given {@link ImageFactory}.
   */
  public <T extends RealType<T>> Image<T> openImage(IFormatReader r, ImageFactory<T> imageFactory)
      throws FormatException, IOException {
    final String[] dimTypes = getDimTypes(r);
    final int[] dimLengths = getDimLengths(r);

    // TEMP - make suffix out of dimension types, until imglib supports them
    final String id = r.getCurrentFile();
    final File idFile = new File(id);
    String name = idFile.exists() ? idFile.getName() : id;
    name = encodeName(name, dimTypes);

    // create image object
    final Image<T> img = imageFactory.createImage(dimLengths, name);

    // set calibration of the image
    img.setCalibration(getCalibration(r, dimLengths));

    // TODO - create better container types; either:
    // 1) an array container type using one byte array per plane
    // 2) as #1, but with an IFormatReader reference reading planes on demand
    // 3) as PlanarContainer, but with an IFormatReader reference
    //    reading planes on demand

    // PlanarContainer is useful for efficient access to pixels in ImageJ
    // (e.g., getPixels)
    // #1 is useful for efficient Bio-Formats import, and useful for tools
    //   needing byte arrays (e.g., BufferedImage Java3D texturing by reference)
    // #2 is useful for efficient memory use for tools wanting matching
    //   primitive arrays (e.g., virtual stacks in ImageJ)
    // #3 is useful for efficient memory use

    // get container
    final PlanarAccess<?> planarAccess = getPlanarAccess(img);
    final T inputType = makeType(r.getPixelType());
    final T outputType = imageFactory.createType();
    final boolean compatibleTypes = outputType.getClass().isAssignableFrom(inputType.getClass());

    final long startTime = System.currentTimeMillis();

    // populate planes
    final int planeCount = r.getImageCount();
    if (planarAccess == null || !compatibleTypes) {
      // use cursor to populate planes

      // NB: This solution is general and works regardless of container,
      // but at the expense of performance both now and later.

      final LocalizablePlaneCursor<T> cursor = img.createLocalizablePlaneCursor();
      byte[] plane = null;
      for (int no = 0; no < planeCount; no++) {
        notifyListeners(
            new StatusEvent(no, planeCount, "Reading plane " + (no + 1) + "/" + planeCount));
        if (plane == null) plane = r.openBytes(no);
        else r.openBytes(no, plane);
        populatePlane(r, no, plane, cursor);
      }
      cursor.close();
    } else {
      // populate the values directly using PlanarAccess interface;
      // e.g., to a PlanarContainer

      byte[] plane = null;
      for (int no = 0; no < planeCount; no++) {
        notifyListeners(
            new StatusEvent(no, planeCount, "Reading plane " + (no + 1) + "/" + planeCount));
        if (plane == null) plane = r.openBytes(no);
        else r.openBytes(no, plane);
        populatePlane(r, no, plane, planarAccess);
      }
    }
    r.close();

    final long endTime = System.currentTimeMillis();
    final float time = (endTime - startTime) / 1000f;
    notifyListeners(
        new StatusEvent(
            planeCount, planeCount, id + ": read " + planeCount + " planes in " + time + "s"));

    return img;
  }
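A hypothetical call sequence for openImage above, assuming it is invoked from within the same class; the file path is a placeholder, ImageReader is the generic Bio-Formats reader, and ArrayContainerFactory is assumed to be the array-backed imglib container factory. Note that openImage closes the reader itself.
  public Image<FloatType> openFloatExample(final String path) throws FormatException, IOException {
    final IFormatReader r = new ImageReader(); // generic Bio-Formats reader
    r.setId(path); // initialize the reader on the given file
    final ImageFactory<FloatType> factory =
        new ImageFactory<FloatType>(new FloatType(), new ArrayContainerFactory());
    return openImage(r, factory); // openImage closes the reader when it is done
  }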
  public static <T extends RealType<T>> ImagePlus createReRegisteredSeries(
      final T targetType,
      final ImagePlus imp,
      final ArrayList<InvertibleBoundable> models,
      final int dimensionality,
      final String directory) {
    final int numImages = imp.getNFrames();

    // the size of the new image
    final int[] size = new int[dimensionality];
    // the offset relative to the output image which starts with its local coordinates (0,0,0)
    final float[] offset = new float[dimensionality];

    final int[][] imgSizes = new int[numImages][dimensionality];

    for (int i = 0; i < numImages; ++i) {
      imgSizes[i][0] = imp.getWidth();
      imgSizes[i][1] = imp.getHeight();
      if (dimensionality == 3) imgSizes[i][2] = imp.getNSlices();
    }

    // estimate the boundaries of the output image and the offset for fusion (negative coordinates
    // after transform have to be shifted to 0,0,0)
    estimateBounds(offset, size, imgSizes, models, dimensionality);

    // use the same size as the first image, this is a little bit ad-hoc
    if (useSizeOfFirstImage) {
      for (int d = 0; d < dimensionality; ++d) {
        size[d] = imgSizes[0][d];
        offset[d] = 0;
      }
    }

    // for output
    final ImageFactory<T> f = new ImageFactory<T>(targetType, new ImagePlusContainerFactory());
    // the composite
    final ImageStack stack = new ImageStack(size[0], size[1]);

    for (int t = 1; t <= numImages; ++t) {
      for (int c = 1; c <= imp.getNChannels(); ++c) {
        final Image<T> out = f.createImage(size);
        if (useNearestNeighborInterpolation)
          fuseChannel(
              out,
              ImageJFunctions.convertFloat(Hyperstack_rearranger.getImageChunk(imp, c, t)),
              offset,
              models.get(t - 1),
              new NearestNeighborInterpolatorFactory<FloatType>(
                  new OutOfBoundsStrategyValueFactory<FloatType>()));
        else
          fuseChannel(
              out,
              ImageJFunctions.convertFloat(Hyperstack_rearranger.getImageChunk(imp, c, t)),
              offset,
              models.get(t - 1),
              new LinearInterpolatorFactory<FloatType>(
                  new OutOfBoundsStrategyValueFactory<FloatType>()));
        try {
          final ImagePlus outImp = ((ImagePlusContainer<?, ?>) out.getContainer()).getImagePlus();

          if (directory == null) {
            // fuse
            for (int z = 1; z <= out.getDimension(2); ++z)
              stack.addSlice(imp.getTitle(), outImp.getStack().getProcessor(z));
          } else {
            // write to disk
            for (int z = 1; z <= out.getDimension(2); ++z) {
              final ImagePlus tmp =
                  new ImagePlus(
                      "img_t"
                          + lz(t, numImages)
                          + "_z"
                          + lz(z, out.getDimension(2))
                          + "_c"
                          + lz(c, imp.getNChannels()),
                      outImp.getStack().getProcessor(z));
              final FileSaver fs = new FileSaver(tmp);
              fs.saveAsTiff(new File(directory, tmp.getTitle()).getAbsolutePath());
              tmp.close();
            }

            out.close();
            outImp.close();
          }
        } catch (ImgLibException e) {
          IJ.log("Output image has no ImageJ type: " + e);
        }
      }
    }

    if (directory != null) return null;

    // convertXYZCT ...
    ImagePlus result = new ImagePlus("registered " + imp.getTitle(), stack);

    // numchannels, z-slices, timepoints (but right now the order is still XYZCT)
    if (dimensionality == 3) {
      result.setDimensions(size[2], imp.getNChannels(), imp.getNFrames());
      result = OverlayFusion.switchZCinXYCZT(result);
      return new CompositeImage(result, CompositeImage.COMPOSITE);
    } else {
      // IJ.log( "ch: " + imp.getNChannels() );
      // IJ.log( "slices: " + imp.getNSlices() );
      // IJ.log( "frames: " + imp.getNFrames() );
      result.setDimensions(imp.getNChannels(), 1, imp.getNFrames());

      if (imp.getNChannels() > 1) return new CompositeImage(result, CompositeImage.COMPOSITE);
      else return result;
    }
  }
 @Override
 public Container<T> getStorageContainer() {
   return image.getContainer();
 }