@Parameters({"target"})
 @BeforeClass
 public void setUp(String target) throws Exception {
   log.info("METADATA VALIDATOR TARGET: " + target);
   sf = new TestServiceFactory().proxy();
   config = new ImportConfig();
   // Let the user know at what level we're logging
   final ch.qos.logback.classic.Logger bioFormatsLogger =
       (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("loci");
   final ch.qos.logback.classic.Logger importerLogger =
       (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("ome.formats");
   log.info(
       String.format(
           "Log levels -- Bio-Formats: %s OMERO.importer: %s",
           bioFormatsLogger.getLevel(), importerLogger.getLevel()));

   // Full-metadata store/wrapper pair.
   store = createInitializedStore();
   wrapper = new OMEROWrapper(config);
   wrapper.setMetadataOptions(new DefaultMetadataOptions(MetadataLevel.ALL));
   wrapper.setMetadataStore(store);
   store.setReader(wrapper.getImageReader());

   // Minimum-metadata store/wrapper pair.
   minimalStore = createInitializedStore();
   minimalWrapper = new OMEROWrapper(config);
   minimalWrapper.setMetadataOptions(new DefaultMetadataOptions(MetadataLevel.MINIMUM));
   minimalStore.setReader(minimalWrapper.getImageReader());
 }

 /** Creates a metadata store initialized against the test service factory. */
 private OMEROMetadataStoreClient createInitializedStore() throws Exception {
   OMEROMetadataStoreClient s = new OMEROMetadataStoreClient();
   s.initialize(sf);
   s.setEnumerationProvider(new TestEnumerationProvider());
   s.setInstanceProvider(new BlitzInstanceProvider(s.getEnumerationProvider()));
   return s;
 }
 /**
  * Dumps <code>TRACE</code> data for a given metadata store.
  *
  * <p>Logs every container-cache entry, every reference-cache pair, the cache
  * sizes, and the index map plus name of each cached {@code Image} container.
  *
  * @param store The store to dump <code>TRACE</code> data for.
  */
 private void traceMetadataStoreData(OMEROMetadataStoreClient store) {
   containerCache = store.getContainerCache();
   referenceCache = store.getReferenceCache();
   log.trace("Starting container cache...");
   // Iterate over entries so each container is fetched once instead of twice per key.
   for (Map.Entry<LSID, IObjectContainer> entry : containerCache.entrySet()) {
     IObjectContainer container = entry.getValue();
     log.trace(
         String.format("%s == %s,%s", entry.getKey(), container.sourceObject, container.LSID));
   }
   log.trace("Starting reference cache...");
   for (Map.Entry<LSID, List<LSID>> entry : referenceCache.entrySet()) {
     for (LSID value : entry.getValue()) {
       log.trace(String.format("%s == %s", entry.getKey(), value));
     }
   }
   log.trace("containerCache contains " + containerCache.size() + " entries.");
   log.trace("referenceCache contains " + store.countCachedReferences(null, null) + " entries.");
   List<IObjectContainer> imageContainers = store.getIObjectContainers(Image.class);
   for (IObjectContainer imageContainer : imageContainers) {
     Image image = (Image) imageContainer.sourceObject;
     log.trace(
         String.format(
             "Image indexes:%s name:%s", imageContainer.indexes, image.getName().getValue()));
   }
 }
 @AfterClass
 public void tearDown() throws IOException {
   // Nest try/finally so that a failure closing one resource does not leave
   // the remaining wrappers open or the stores logged in; the first exception
   // thrown still propagates to TestNG.
   try {
     wrapper.close();
   } finally {
     try {
       minimalWrapper.close();
     } finally {
       try {
         store.logout();
       } finally {
         minimalStore.logout();
       }
     }
   }
 }
  /**
   * Executes one step of the import request.
   *
   * <p>Step 0 saves metadata, step 1 parses pixel data, step 2 imports overlays and
   * generates thumbnails, step 3 launches server-side processing, and step 4 returns
   * the previously saved objects. Every failure is reported to observers (where a
   * matching error event exists) and converted into a {@code Cancel} via
   * {@code helper.cancel(...)}.
   *
   * @param step zero-based step index supplied by the processing framework.
   * @return the step's result: the saved-object map for steps 0 and 4, otherwise null.
   */
  public Object step(int step) {
    helper.assertStep(step);
    try {
      // Tag all log output from this step with the fileset's log file name.
      MDC.put("fileset", logFilename);
      log.debug("Step " + step);
      Job j = activity.getChild();
      if (j == null) {
        throw helper.cancel(new ERR(), null, "null-job");
      } else if (!(j instanceof MetadataImportJob)) {
        throw helper.cancel(new ERR(), null, "unexpected-job-type", "job-type", j.ice_id());
      }

      if (step == 0) {
        return importMetadata((MetadataImportJob) j);
      } else if (step == 1) {
        return pixelData(null); // (ThumbnailGenerationJob) j);
      } else if (step == 2) {
        return generateThumbnails(null); // (PixelDataJob) j); Nulls image
      } else if (step == 3) {
        // TODO: indexing and scripting here as well.
        store.launchProcessing();
        return null;
      } else if (step == 4) {
        return objects;
      } else {
        throw helper.cancel(new ERR(), null, "bad-step", "step", "" + step);
      }
    } catch (MissingLibraryException mle) {
      notifyObservers(new ErrorHandler.MISSING_LIBRARY(fileName, mle, usedFiles, format));
      throw helper.cancel(new ERR(), mle, "import-missing-library", "filename", fileName);
    } catch (UnsupportedCompressionException uce) {
      // Handling as UNKNOWN_FORMAT for 4.3.0
      notifyObservers(new ErrorHandler.UNKNOWN_FORMAT(fileName, uce, this));
      throw helper.cancel(new ERR(), uce, "import-unknown-format", "filename", fileName);
    } catch (UnknownFormatException ufe) {
      notifyObservers(new ErrorHandler.UNKNOWN_FORMAT(fileName, ufe, this));
      throw helper.cancel(new ERR(), ufe, "import-unknown-format", "filename", fileName);
    } catch (IOException io) {
      notifyObservers(new ErrorHandler.FILE_EXCEPTION(fileName, io, usedFiles, format));
      throw helper.cancel(new ERR(), io, "import-file-exception", "filename", fileName);
    } catch (FormatException fe) {
      notifyObservers(new ErrorHandler.FILE_EXCEPTION(fileName, fe, usedFiles, format));
      throw helper.cancel(new ERR(), fe, "import-file-exception", "filename", fileName);
    } catch (Cancel c) {
      // Already a cancellation; propagate untouched.
      throw c;
    } catch (Throwable t) {
      notifyObservers(
          new ErrorHandler.INTERNAL_EXCEPTION(
              fileName, new RuntimeException(t), usedFiles, format));
      throw helper.cancel(new ERR(), t, "import-request-failure");
    } finally {
      try {
        // Keep the server-side record of the import log's size up to date.
        long size = logPath.size();
        store.updateFileSize(logFile, size);
      } catch (Throwable t) {
        // NOTE(review): throwing from finally masks any in-flight exception — confirm intended.
        throw helper.cancel(new ERR(), t, "update-log-file-size");
      }
      MDC.clear();
    }
  }
 /** Verifies every referenced LSID of a LogicalChannel resolves to a Filter or FilterSet. */
 @Test(dependsOnMethods = {"testMetadataLevel"})
 public void testLogicalChannelRefs() {
   Class<? extends IObject> klass = LogicalChannel.class;
   List<IObjectContainer> containers = store.getIObjectContainers(klass);
   referenceCache = store.getReferenceCache();
   // The candidate target types are loop-invariant; build the list once
   // instead of once per reference.
   List<Class<? extends IObject>> klasses = new ArrayList<Class<? extends IObject>>();
   klasses.add(Filter.class);
   klasses.add(FilterSet.class);
   for (IObjectContainer container : containers) {
     LSID lsid = new LSID(container.LSID);
     if (!referenceCache.containsKey(lsid)) {
       continue;
     }
     List<LSID> references = referenceCache.get(lsid);
     assertTrue(references.size() > 0);
     for (LSID referenceLSID : references) {
       String asString = referenceLSID.toString();
       // Emission/excitation filter references carry a suffix that must be
       // stripped before looking the LSID up in the container cache.
       if (asString.endsWith(OMEROMetadataStoreClient.OMERO_EMISSION_FILTER_SUFFIX)
           || asString.endsWith(OMEROMetadataStoreClient.OMERO_EXCITATION_FILTER_SUFFIX)) {
         int index = asString.lastIndexOf(':');
         referenceLSID = new LSID(asString.substring(0, index));
       }
       assertNotNull(referenceLSID);
       String e = String.format("LSID %s not found in container cache", referenceLSID);
       assertTrue(e, authoritativeLSIDExists(klasses, referenceLSID));
     }
   }
 }
  /**
   * Imports a directory tree, creating a time-stamped project and one dataset
   * per directory (or a single dataset when recursion is disabled).
   *
   * @param targetDirectory root directory to import from.
   * @return the status of the last directory processed (false if none was).
   * @throws Throwable if project/dataset creation or directory processing fails.
   */
  public boolean run(String targetDirectory) throws Throwable {
    // Create a time stamp and use it for the project name
    String projectName = new Date().toString();
    log.info("Storing project: " + projectName);
    Project project = store.addProject(projectName, "");

    // Our root directory
    File projectDirectory = new File(targetDirectory);
    boolean status = false;
    if (!config.getRecurse()) {
      // Do not parse sub-directory - only import files in the target.
      String name = projectDirectory.getName();
      log.info("Storing dataset: " + name);
      Dataset dataset = store.addDataset(name, "", project);
      status = processDirectory(config.getPopulate(), projectDirectory, dataset);
    } else {
      // Parse the sub-directories - these will become our datasets.
      // listFiles() returns null when the target does not exist or is not a
      // readable directory; guard against the resulting NPE.
      File[] children = projectDirectory.listFiles();
      if (children != null) {
        for (File datasetDirectory : children) {
          if (datasetDirectory.exists() && datasetDirectory.isDirectory()) {
            String name = datasetDirectory.getName();
            log.info("Storing dataset: " + name);
            Dataset dataset = store.addDataset(name, "", project);
            // In each sub-directory/dataset, import the images needed
            status = processDirectory(config.getPopulate(), datasetDirectory, dataset);
          }
        }
      }
    }
    store.logout();
    return status;
  }
 /**
  * Resets the metadata store between imports: clears the group and current log
  * file, then recreates the root node. Failures are logged, never thrown, so a
  * broken clear cannot abort the caller.
  */
 public void clear() {
   try {
     store.setGroup(null);
     store.setCurrentLogFile(null, null);
     store.createRoot();
   } catch (Throwable t) {
     log.error("failed to clear metadata store", t);
   }
 }
 /** Verifies each WellSample container's plate index resolves to a cached Plate. */
 @Test(dependsOnMethods = {"testMetadataLevel"})
 public void testPlatesExist() {
   for (IObjectContainer wellSampleContainer : store.getIObjectContainers(WellSample.class)) {
     Integer plateIndex = wellSampleContainer.indexes.get(Index.PLATE_INDEX.getValue());
     String message = String.format("Plate %d not found in container cache", plateIndex);
     assertTrue(message, store.countCachedContainers(Plate.class, plateIndex) > 0);
   }
 }
 /** Verifies each ObjectiveSettings container's image index is within the cached image count. */
 @Test(dependsOnMethods = {"testMetadataLevel"})
 public void testObjectiveSettingsIndexes() {
   Class<? extends IObject> klass = ObjectiveSettings.class;
   List<IObjectContainer> containers = store.getIObjectContainers(klass);
   // The total image count is loop-invariant; compute it once rather than
   // once per container.
   int imageCount = store.countCachedContainers(Image.class, null);
   for (IObjectContainer container : containers) {
     Map<String, Integer> indexes = container.indexes;
     Integer imageIndex = indexes.get(Index.IMAGE_INDEX.getValue());
     String e = String.format("imageIndex %d >= imageCount %d", imageIndex, imageCount);
     assertFalse(e, imageIndex >= imageCount);
   }
 }
  /**
   * Constructs an import library bound to an existing metadata store client.
   *
   * @param client metadata store client used for all server communication; must not be null.
   * @param reader Bio-Formats reader wrapper; must not be null (validated but not stored here).
   * @param transfer file-transfer implementation used during upload.
   * @param exclusions optional file-exclusion checks; ignored when null.
   * @param minutesToWait how long to wait for server-side processing.
   * @throws NullPointerException if {@code client} or {@code reader} is null.
   */
  public ImportLibrary(
      OMEROMetadataStoreClient client,
      OMEROWrapper reader,
      FileTransfer transfer,
      List<FileExclusion> exclusions,
      int minutesToWait) {
    if (client == null || reader == null) {
      throw new NullPointerException("All arguments to ImportLibrary() must be non-null.");
    }

    this.store = client;
    this.transfer = transfer;
    if (exclusions != null) {
      this.exclusions.addAll(exclusions);
    }
    this.minutesToWait = minutesToWait;
    repo = lookupManagedRepository();
    // Adapter which should be used for callbacks. This is more
    // complicated than it needs to be at the moment. We're only sure that
    // the OMSClient has a ServiceFactory (and not necessarily a client)
    // so we have to inspect various fields to get the adapter.
    sf = store.getServiceFactory();
    oa = sf.ice_getConnection().getAdapter();
    final Ice.Communicator ic = oa.getCommunicator();
    // Per-client category obtained from the Glacier2 router — presumably used
    // to build callback object identities; confirm against callers.
    category = omero.client.getRouter(ic).getCategoryForClient();
  }
 /**
  * Initializes the full-metadata (ALL level) reader on the target file, runs
  * store post-processing, and dumps TRACE data for inspection.
  *
  * @param target path of the file to initialize the reader with.
  * @throws Exception if reader initialization or post-processing fails.
  */
 @Parameters({"target"})
 @Test
 public void testMetadataLevelAllSetId(String target) throws Exception {
   wrapper.setId(target);
   store.postProcess();
   traceMetadataStoreData(store);
 }
 /**
  * Initializes the minimum-metadata (MINIMUM level) reader on the target file,
  * runs store post-processing, and dumps TRACE data for inspection.
  *
  * @param target path of the file to initialize the reader with.
  * @throws Exception if reader initialization or post-processing fails.
  */
 @Parameters({"target"})
 @Test
 public void testMetadataLevelMinimumSetId(String target) throws Exception {
   minimalWrapper.setId(target);
   minimalStore.postProcess();
   traceMetadataStoreData(minimalStore);
 }
 /* (non-Javadoc)
  * @see java.lang.Runnable#run()
  */
 public void run() {
   try {
     // Check for null BEFORE synchronizing: `synchronized (client)` with a
     // null client throws a NullPointerException itself, which would be
     // misreported below as a ping failure.
     if (client != null) {
       synchronized (client) {
         client.ping();
       }
     }
   } catch (Throwable t) {
     log.error("Exception while executing ping(), logging Connector out: ", t);
     try {
       client.logout();
       notifyObservers(new ImportEvent.LOGGED_OUT());
     } catch (Exception e) {
       log.error("Nested error on client.logout() " + "while handling exception from ping()", e);
     }
   }
 }
 /** Verifies that every cached OTF container is referenced by at least one object. */
 @Test(dependsOnMethods = {"testMetadataLevel"})
 public void testOTFIsReferenced() {
   Class<? extends IObject> klass = OTF.class;
   List<IObjectContainer> containers = store.getIObjectContainers(klass);
   referenceCache = store.getReferenceCache();
   // Use a labeled continue so EVERY container is verified; a plain return on
   // the first match would leave the remaining containers unchecked.
   containerLoop:
   for (IObjectContainer container : containers) {
     LSID lsid = new LSID(container.LSID);
     for (LSID target : referenceCache.keySet()) {
       for (LSID reference : referenceCache.get(target)) {
         if (reference.equals(lsid)) {
           continue containerLoop;
         }
       }
     }
     fail(String.format("%s %s not referenced by any object.", klass, lsid));
   }
 }
  /**
   * Uses the {@link OMEROMetadataStoreClient} to save all metadata for the current image provided.
   *
   * @param mij Object hosting metadata to save.
   * @return the map of saved {@link IObject}s keyed by simple class name
   *     (e.g. "Pixels", "Image", "Plate"), as returned by the store's save.
   * @throws FormatException if there is an error parsing metadata.
   * @throws IOException if there is an error reading the file.
   */
  @SuppressWarnings({"unchecked", "rawtypes"})
  public Map<String, List<IObject>> importMetadata(MetadataImportJob mij) throws Throwable {
    notifyObservers(new ImportEvent.LOADING_IMAGE(shortName, 0, 0, 0));

    // 1st we post-process the metadata that we've been given.
    notifyObservers(new ImportEvent.BEGIN_POST_PROCESS(0, null, null, null, 0, null));
    store.setUserSpecifiedName(userSpecifiedName);
    store.setUserSpecifiedDescription(userSpecifiedDescription);
    if (userPixels != null && userPixels.length >= 3)
      // The array could be empty due to Ice-non-null semantics.
      store.setUserSpecifiedPhysicalPixelSizes(userPixels[0], userPixels[1], userPixels[2]);
    store.setUserSpecifiedTarget(userSpecifiedTarget);
    store.setUserSpecifiedAnnotations(annotationList);
    store.postProcess();
    notifyObservers(new ImportEvent.END_POST_PROCESS(0, null, userSpecifiedTarget, null, 0, null));

    // Then the store persists everything and hands back the created objects,
    // cached here by simple class name for the later pixel/thumbnail steps.
    notifyObservers(new ImportEvent.BEGIN_SAVE_TO_DB(0, null, userSpecifiedTarget, null, 0, null));
    objects = store.saveToDB(activity);
    pixList = (List) objects.get(Pixels.class.getSimpleName());
    imageList = (List) objects.get(Image.class.getSimpleName());
    plateList = (List) objects.get(Plate.class.getSimpleName());
    notifyObservers(new ImportEvent.END_SAVE_TO_DB(0, null, userSpecifiedTarget, null, 0, null));

    return objects;
  }
 /** Verifies every LightSettings container references a cached LightSource. */
 @Test(dependsOnMethods = {"testMetadataLevel"})
 public void testLightSourceSettingsLightSourceRef() {
   Class<? extends IObject> settingsClass = LightSettings.class;
   List<IObjectContainer> containers = store.getIObjectContainers(settingsClass);
   referenceCache = store.getReferenceCache();
   // Keep the referenced type in its own variable: the original reassigned
   // `klass` inside the inner loop, so later "not found in reference cache"
   // messages wrongly reported LightSource instead of LightSettings.
   Class<? extends IObject> referencedClass = LightSource.class;
   for (IObjectContainer container : containers) {
     LSID lsid = new LSID(container.LSID);
     String e = String.format("%s %s not found in reference cache", settingsClass, container.LSID);
     assertTrue(e, referenceCache.containsKey(lsid));
     List<LSID> references = referenceCache.get(lsid);
     assertTrue(references.size() > 0);
     for (LSID referenceLSID : references) {
       assertNotNull(referenceLSID);
       e =
           String.format(
               "%s with LSID %s not found in container cache", referencedClass, referenceLSID);
       assertTrue(e, authoritativeLSIDExists(referencedClass, referenceLSID));
     }
   }
 }
 /**
  * Examines the container cache and returns whether or not an LSID is present.
  *
  * @param klass Instance class of the source object container.
  * @param lsid LSID to compare against.
  * @return <code>true</code> if the object exists in the container cache, and <code>false</code>
  *     otherwise.
  */
 private boolean authoritativeLSIDExists(Class<? extends IObject> klass, LSID lsid) {
   // Linear scan over the cached containers of the requested type.
   for (IObjectContainer candidate : store.getIObjectContainers(klass)) {
     if (new LSID(candidate.LSID).equals(lsid)) {
       return true;
     }
   }
   return false;
 }
 /** Verifies Pixels.sizeC matches the cached Channel containers, total and per index. */
 @Test(dependsOnMethods = {"testMetadataLevel"})
 public void testChannelCount() {
   List<IObjectContainer> containers = store.getIObjectContainers(Pixels.class);
   for (IObjectContainer container : containers) {
     Pixels pixels = (Pixels) container.sourceObject;
     assertNotNull(pixels.getSizeC());
     int sizeC = pixels.getSizeC().getValue();
     Integer imageIndex = container.indexes.get(Index.IMAGE_INDEX.getValue());
     // Assert the total channel count. The original formatted this message but
     // never asserted it, leaving the values as dead stores.
     int count = store.countCachedContainers(Channel.class, imageIndex);
     String e = String.format("Pixels sizeC %d != channel object count %d", sizeC, count);
     assertEquals(e, sizeC, count);
     for (int c = 0; c < sizeC; c++) {
       // Exactly one Channel container must exist per channel index.
       count = store.countCachedContainers(Channel.class, imageIndex, c);
       e =
           String.format(
               "Missing channel object; imageIndex=%d " + "channelIndex=%d", imageIndex, c);
       assertEquals(e, 1, count);
     }
   }
 }
 /** Verifies every cached PlaneInfo carries complete Z/C/T plane coordinates. */
 @Test(dependsOnMethods = {"testMetadataLevel"})
 public void testPlaneInfoZCT() {
   for (IObjectContainer container : store.getIObjectContainers(PlaneInfo.class)) {
     PlaneInfo plane = (PlaneInfo) container.sourceObject;
     assertNotNull("theZ is null", plane.getTheZ());
     assertNotNull("theC is null", plane.getTheC());
     assertNotNull("theT is null", plane.getTheT());
   }
 }
 /**
  * Opens the file using the {@link FormatReader} instance.
  *
  * <p>Wires the reader and store together before initializing the file: the
  * store receives both metadata and min/max values during parsing.
  *
  * @param reader wrapper around the Bio-Formats reader to initialize.
  * @param store metadata store to attach to the reader.
  * @param targetFile file to open via the checked path's {@code bfSetId}.
  * @throws FormatException if Bio-Formats cannot parse the file.
  * @throws IOException if the file cannot be read.
  */
 private void open(OMEROWrapper reader, OMEROMetadataStoreClient store, CheckedPath targetFile)
     throws FormatException, IOException {
   // reader.close(); This instance is no longer re-used
   reader.setMetadataStore(store);
   reader.setMinMaxStore(store);
   store.setReader(reader.getImageReader());
   targetFile.bfSetId(reader);
   // reset series count
   if (log.isDebugEnabled()) {
     log.debug("Image Count: " + reader.getImageCount());
   }
 }
  /**
   * Primary user method for importing a number of import candidates.
   *
   * @param config The configuration information.
   * @param candidates Hosts information about the files to import.
   * @return <code>false</code> if an import failed and "continue on error" is
   *     disabled, <code>true</code> otherwise (including when there were no
   *     containers to import).
   */
  public boolean importCandidates(ImportConfig config, ImportCandidates candidates) {
    List<ImportContainer> containers = candidates.getContainers();
    if (containers != null) {
      int numDone = 0;
      for (int index = 0; index < containers.size(); index++) {
        ImportContainer ic = containers.get(index);
        // Resolve the configured import target (dataset or screen) per container.
        if (DATASET_CLASS.equals(config.targetClass.get())) {
          ic.setTarget(store.getTarget(Dataset.class, config.targetId.get()));
        } else if (SCREEN_CLASS.equals(config.targetClass.get())) {
          ic.setTarget(store.getTarget(Screen.class, config.targetId.get()));
        }

        if (config.checksumAlgorithm.get() != null) {
          ic.setChecksumAlgorithm(config.checksumAlgorithm.get());
        }

        try {
          importImage(ic, index, numDone, containers.size());
          numDone++;
        } catch (Throwable t) {
          // Surface the server-provided message when available.
          String message = "Error on import";
          if (t instanceof ServerError) {
            final ServerError se = (ServerError) t;
            if (StringUtils.isNotBlank(se.message)) {
              message += ": " + se.message;
            }
          }
          log.error(message, t);
          if (!config.contOnError.get()) {
            log.info("Exiting on error");
            return false;
          } else {
            log.info("Continuing after error");
          }
        }
      }
    }
    return true;
  }
 /** Logs the store out and discards the reference; failures are logged, never thrown. */
 private void cleanupStore() {
   try {
     if (store == null) {
       return;
     }
     try {
       store.logout();
     } finally {
       // Drop the reference even if logout fails.
       store = null;
     }
   } catch (Throwable e) {
     log.error("Failed on cleanupStore", e);
   }
 }
 /** Verifies every referenced LSID of a FilterSet resolves to a Filter or Dichroic. */
 @Test(dependsOnMethods = {"testMetadataLevel"})
 public void testFilterSetRefs() {
   Class<? extends IObject> klass = FilterSet.class;
   List<IObjectContainer> containers = store.getIObjectContainers(klass);
   referenceCache = store.getReferenceCache();
   // The candidate target types are loop-invariant; build the list once
   // instead of once per reference.
   List<Class<? extends IObject>> klasses = new ArrayList<Class<? extends IObject>>();
   klasses.add(Filter.class);
   klasses.add(Dichroic.class);
   for (IObjectContainer container : containers) {
     LSID lsid = new LSID(container.LSID);
     if (!referenceCache.containsKey(lsid)) {
       continue;
     }
     List<LSID> references = referenceCache.get(lsid);
     assertTrue(references.size() > 0);
     for (LSID referenceLSID : references) {
       assertNotNull(referenceLSID);
       String e = String.format("LSID %s not found in container cache", referenceLSID);
       assertTrue(e, authoritativeLSIDExists(klasses, referenceLSID));
     }
   }
 }
  /**
   * Constructs a test engine: wraps a metadata store client in an interceptor
   * proxy, logs in (by session key or username/password), and builds the
   * import library used to run tests.
   *
   * @param config test engine configuration, including server and feedback settings.
   * @throws CannotCreateSessionException if the server refuses to create a session.
   * @throws PermissionDeniedException if the login credentials are rejected.
   * @throws ServerError if any other server-side error occurs.
   */
  public TestEngine(TestEngineConfig config)
      throws CannotCreateSessionException, PermissionDeniedException, ServerError {
    this.config = config;
    // Proxy the store through the interceptor so calls can be observed/advised.
    ProxyFactory pf = new ProxyFactory(new OMEROMetadataStoreClient());
    pf.addAdvice(interceptor);
    pf.setProxyTargetClass(true);
    store = (OMEROMetadataStoreClient) pf.getProxy();
    wrapper = new OMEROWrapper(new ImportConfig());

    login_url = config.getFeedbackLoginUrl();
    login_username = config.getFeedbackLoginUsername();
    login_password = config.getFeedbackLoginPassword();
    message_url = config.getFeedbackMessageUrl();
    comment_url = config.getCommentUrl();

    // Login: prefer an existing session key over username/password.
    if (config.getSessionKey() != null) {
      store.initialize(config.getHostname(), config.getPort(), config.getSessionKey());
    } else {
      store.initialize(
          config.getUsername(), config.getPassword(), config.getHostname(), config.getPort());
    }
    importLibrary = new ImportLibrary(store, wrapper);
  }
  /**
   * Parses binary pixel data when min/max values are missing, records each
   * Pixels' file/series association on the server, and updates the saved
   * Pixels objects.
   *
   * @param pdj the pixel-data job (currently unused).
   * @return always null.
   * @throws Throwable if parsing or server communication fails.
   */
  public Object pixelData(PixelDataJob pdj) throws Throwable {

    if (!reader.isMinMaxSet() && !noStatsInfo) {
      // Parse the binary data to generate min/max values and a SHA1 per series.
      for (int series = 0; series < reader.getSeriesCount(); series++) {
        Pixels seriesPixels = pixList.get(series);
        ImportSize size = new ImportSize(fileName, seriesPixels, reader.getDimensionOrder());
        MessageDigest md = parseData(fileName, series, size);
        if (md != null) {
          seriesPixels.setSha1(store.toRType(Hex.encodeHexString(md.digest())));
        }
      }
    }

    // As we're in metadata-only mode on we need to
    // tell the server which Image matches which series.
    for (int series = 0; series < pixList.size(); series++) {
      Pixels seriesPixels = pixList.get(series);
      store.setPixelsFile(seriesPixels.getId().getValue(), fileName, repoUuid);
      seriesPixels.getImage().setSeries(store.toRType(series));
    }

    for (final Image image : imageList) {
      image.unloadAnnotationLinks();
    }

    store.updatePixels(pixList);

    // Re-check: parsing above may have populated the reader's min/max values.
    if (!reader.isMinMaxSet() && !noStatsInfo) {
      store.populateMinMax();
    }

    return null;
  }
 /**
  * If available, populates overlays for a given set of pixels objects.
  *
  * <p>Overlay masks are only produced when the underlying Bio-Formats reader
  * is a {@link MIASReader}; otherwise this is a no-op.
  *
  * @param pixelsList Pixels objects to populate overlays for.
  * @param plateIds Plate object IDs to populate overlays for.
  */
 private void importOverlays(List<Pixels> pixelsList, List<Long> plateIds)
     throws FormatException, IOException {
   IFormatReader baseReader = reader.getImageReader().getReader();
   if (!(baseReader instanceof MIASReader)) {
     return;
   }
   try {
     ServiceFactoryPrx serviceFactory = store.getServiceFactory();
     OverlayMetadataStore overlayStore = new OverlayMetadataStore();
     overlayStore.initialize(serviceFactory, pixelsList, plateIds);
     ((MIASReader) baseReader).parseMasks(overlayStore);
     overlayStore.complete();
   } catch (ServerError e) {
     // Overlays are best-effort; a server failure must not abort the import.
     log.warn("Error while populating MIAS overlays.", e);
   }
 }
 /** Verifies each image's acquisition date falls between 1995-01-01 and now. */
 @Test(dependsOnMethods = {"testMetadataLevel"})
 public void testCreationDateIsReasonable() {
   List<IObjectContainer> containers = store.getIObjectContainers(Image.class);
   for (IObjectContainer container : containers) {
     Image image = (Image) container.sourceObject;
     assertNotNull(image.getAcquisitionDate());
     Date acquisitionDate = new Date(image.getAcquisitionDate().getValue());
     Date now = new Date(System.currentTimeMillis());
     // GregorianCalendar months are zero-based: month 1 is February, so the
     // previous bound was actually 1995-02-01. Use 0 for January.
     Date january1st1995 = new GregorianCalendar(1995, 0, 1).getTime();
     if (acquisitionDate.after(now)) {
       fail(String.format("%s after %s", acquisitionDate, now));
     }
     if (acquisitionDate.before(january1st1995)) {
       fail(String.format("%s before %s", acquisitionDate, january1st1995));
     }
   }
 }
 /**
  * Retrieves the first managed repository from the list of current active repositories.
  *
  * @return Active proxy for the legacy repository, or null when none of the
  *     repository proxies can be narrowed to a managed repository.
  */
 public ManagedRepositoryPrx lookupManagedRepository() {
   try {
     RepositoryMap map = store.getServiceFactory().sharedResources().repositories();
     // Return the first proxy that checked-casts to a managed repository.
     for (RepositoryPrx proxy : map.proxies) {
       if (proxy == null) {
         continue;
       }
       ManagedRepositoryPrx managed = ManagedRepositoryPrxHelper.checkedCast(proxy);
       if (managed != null) {
         return managed;
       }
     }
     return null;
   } catch (ServerError e) {
     throw new RuntimeException(e);
   }
 }
  /**
   * Imports overlays and, unless disabled by the user, triggers server-side
   * thumbnail generation for the saved pixels.
   *
   * @param tgj the thumbnail-generation job (currently unused).
   * @return always null.
   * @throws Throwable if overlay import or thumbnail generation fails.
   */
  public Object generateThumbnails(ThumbnailGenerationJob tgj) throws Throwable {

    // Collect the plate ID when the first image belongs to a plate/well layout.
    final List<Long> plateIds = new ArrayList<Long>();
    final Image firstImage = pixList.get(0).getImage();
    if (firstImage.sizeOfWellSamples() > 0) {
      Plate plate = firstImage.copyWellSamples().get(0).getWell().getPlate();
      plateIds.add(plate.getId().getValue());
    }

    notifyObservers(new ImportEvent.IMPORT_OVERLAYS(0, null, userSpecifiedTarget, null, 0, null));
    importOverlays(pixList, plateIds);

    notifyObservers(new ImportEvent.IMPORT_PROCESSING(0, null, userSpecifiedTarget, null, 0, null));
    if (!doThumbnails) {
      log.warn("Not creating thumbnails at user request!");
    } else {
      store.resetDefaultsAndGenerateThumbnails(plateIds, pixelIds());
    }

    return null;
  }
  /**
   * Upload files to the managed repository.
   *
   * <p>This is done by first passing in the possibly absolute local file paths. A common selection
   * of those are chosen and passed back to the client.
   *
   * <p>As each file is written to the server, a message digest is kept updated of the bytes that
   * are being written. These are then returned to the caller so they can be checked against the
   * values found on the server.
   *
   * @param srcFiles local paths of the files to upload.
   * @param proc server-side import process that receives the uploaded bytes.
   * @return A list of the client-side (i.e. local) hashes for each file; may be
   *     shorter than {@code srcFiles} if an upload fails, since the loop stops
   *     at the first error.
   */
  public List<String> uploadFilesToRepository(
      final String[] srcFiles, final ImportProcessPrx proc) {
    // Shared transfer buffer sized to the store's default block size.
    final byte[] buf = new byte[store.getDefaultBlockSize()];
    final int fileTotal = srcFiles.length;
    final List<String> checksums = new ArrayList<String>(fileTotal);
    // TODO Fix with proper code instead of 10000L
    final TimeEstimator estimator = new ProportionalTimeEstimatorImpl(10000L);

    log.debug("Used files created:");
    for (int i = 0; i < fileTotal; i++) {
      try {
        checksums.add(uploadFile(proc, srcFiles, i, checksumProviderFactory, estimator, buf));
      } catch (ServerError e) {
        // Abort remaining uploads; the partial checksum list is still returned.
        log.error("Server error uploading file.", e);
        break;
      } catch (IOException e) {
        log.error("I/O error uploading file.", e);
        break;
      }
    }
    return checksums;
  }