/**
 * Converts a parsed MARC collection into Dublin Core form, wrapped in a freshly minted
 * {@link ZdoModel} stored under a random UUID-based URL.
 *
 * @param collectionType the unmarshalled MARC collection; must contain exactly one record
 * @param orgShortcut organization shortcut passed through to the MARC-to-DC converter
 * @return a single-element list holding the converted model
 * @throws RuntimeException if the collection does not contain exactly one record
 */
public List<ZdoModel> parse(final CollectionType collectionType, final String orgShortcut) {
  final List<RecordType> records = collectionType.getRecords();
  // Only single-record collections are supported. Report the actual count so an
  // empty collection is not misreported as "more than one record".
  if (records.size() != 1) {
    throw new RuntimeException(
        "Marc collection must contain exactly one record, but contained "
            + records.size()
            + ".");
  }
  RecordType record = records.get(0);
  MarcToDcConverter marcToDcConverter = new MarcToDcConverter();
  // New model gets a random repository URL; the converter fills in its metadata.
  ZdoModel zdoModel = new ZdoModel(store.createUrl(UUID.randomUUID().toString()));
  marcToDcConverter.convert(record, orgShortcut, zdoModel);
  List<ZdoModel> resultList = new ArrayList<>();
  resultList.add(zdoModel);
  return resultList;
}
 public void deleteImage(int articleId) throws IOException {
   Article article = em.find(Article.class, articleId);
   if (article == null) {
     throw new RuntimeException("No such article");
   }
   if (!article.getOwner().equals(plAccess.getUser().getLoginName())) {
     throw new RuntimeException("The user has no such article.");
   }
   String oldImageId = article.getImageId();
   if (oldImageId != null) {
     store.delete(store.createUrl(oldImageId));
   }
   article.setImageId(null);
 }
  public void uploadImage(int articleId, MultipartFormDataInput input) throws IOException {
    Article article = em.find(Article.class, articleId);
    if (article == null) {
      throw new RuntimeException("No such article");
    }
    if (!article.getOwner().equals(plAccess.getUser().getLoginName())) {
      throw new RuntimeException("The user has no such article.");
    }
    String oldImageId = article.getImageId();
    if (oldImageId != null) {
      store.delete(store.createUrl(oldImageId));
    }

    String uuid = imgUploadTools.uploadedFileToFedora(input, ZdoFileType.articleImage);
    article.setImageId(uuid);
  }
  /**
   * Publishes every document in the given batch: builds the model forest, unpublishes any
   * replaced documents, indexes the result in Solr, triggers image tiling, marks the batch
   * published and records usage statistics.
   *
   * <p>NOTE(review): this method re-initializes several instance fields ({@code rootsToDelete},
   * {@code imagesToTile}, {@code modelTreeNodeIndex}, {@code modelTreeNodeKdrIndex},
   * {@code dataToIndex}) that the helper methods operate on — presumably it is not safe to run
   * concurrently on the same instance; confirm before reusing elsewhere.
   *
   * @param batchId id of the batch to publish
   * @throws IOException propagated from store/indexing operations
   * @throws RuntimeException if the batch is missing, deleted, not unfinished, or if the
   *     image-processing service call fails
   */
  @Transactional
  public void publishBatch(int batchId) throws IOException {
    // Default to the currently logged-in user when no publishing user was preset.
    if (userUnderWhichToPublish == null) {
      userUnderWhichToPublish = plAccess.getUser().getLoginName();
    }

    ZdoBatch batch = em.find(ZdoBatch.class, batchId);

    if (batch == null
        || batch.isDeleted() /*todo || !batch.getOwner().equals(identity.getAccount().getId())*/) {
      throw new RuntimeException("Bad batch id to publish.");
    }

    // Only batches still in the unfinished state may be published.
    if (!batch.getState().equals(ZdoBatch.BatchState.unfinished)) {
      throw new RuntimeException("Cant publish discarded or published batch.");
    }

    // Statistics collectors: occurrence counts keyed by type name / type id string.
    Map<String, Integer> zdoTypesCount = new HashMap<>();
    Map<String, Integer> documentTypesCount = new HashMap<>();
    Map<String, Integer> documentSubTypesCount = new HashMap<>();

    // Reset the per-publish working state shared with the helper methods below.
    rootsToDelete = new ArrayList<>();
    imagesToTile = new ArrayList<>();
    modelTreeNodeIndex = new HashMap<>();
    modelTreeNodeKdrIndex = new HashMap<>();
    dataToIndex = new ArrayList<>();
    List<FeedDataHolder> feedData =
        new ArrayList<>(); // Rss and Atom feed data about newly published documents
    // Construct a forest from models that need to be published - bottom to top
    for (String docId : batch.getDocuments()) {
      ZdoModel model = store.get(store.createUrl(docId));

      storeStatistic(model, ZdoTerms.zdoType, zdoTypesCount);

      feedData.add(createFeedData(model));
      addNodeToStructure(model);
    }

    // Store stats - doctypes of root models
    for (ModelTreeNode modelTreeNode : dataToIndex) {
      ZdoModel model = modelTreeNode.getModel();
      if (ZdoType.periodical
          .name()
          .equals(
              model.get(
                  ZdoTerms
                      .zdoType))) { // periodicals are not counted in the loop above (only end
        // branch docs are), so count them here
        storeStatistic(model, ZdoTerms.zdoType, zdoTypesCount);
      }
      storeStatistic(model, ZdoTerms.documentType, documentTypesCount);
      storeStatistic(model, ZdoTerms.documentSubType, documentSubTypesCount);
    }

    // Unpublish documents being replaced and adopt their children - top to bottom
    for (ModelTreeNode modelTreeNode : dataToIndex) {
      resolveRoots(modelTreeNode);
    }

    // After all is done in fedora, index it in Solr
    rootsToDelete.forEach(ExWrapper.accept(indexer::remove));
    dataToIndex.forEach(ExWrapper.accept(indexer::update));

    // Finally, hand the collected images to the external image-processing service for tiling.
    Response response =
        ClientBuilder.newClient()
            .target(IP_ENDPOINT + "process")
            .request()
            .post(Entity.json(imagesToTile));
    if (response.getStatusInfo().getFamily() != Response.Status.Family.SUCCESSFUL) {
      throw new RuntimeException("Failed to call image processing war.");
    }

    batch.setState(ZdoBatch.BatchState.published);
    batch.setModified(LocalDateTime.now());

    // Record statistics for the publishing user, their organization, and per type counts.
    String org =
        plAccess.getOrganizationOfUser(plAccess.getUser(userUnderWhichToPublish)).getName();
    statsAccess.incrementUserDocsPublished(userUnderWhichToPublish, batch.getNumDocs());
    statsAccess.incrementOrganizationDocsPublished(org, batch.getNumDocs());
    statsAccessCommon.documentPublished(userUnderWhichToPublish, org, batch.getNumDocs());
    for (String zdoTypeStr : zdoTypesCount.keySet()) {
      statsAccess.incrementZdoTypeUsage(
          ZdoType.valueOf(zdoTypeStr), org, zdoTypesCount.get(zdoTypeStr));
    }
    for (String docTypeStr : documentTypesCount.keySet()) {
      statsAccess.incrementDocTypeUsage(
          Integer.valueOf(docTypeStr), org, documentTypesCount.get(docTypeStr));
    }
    for (String docSubTypeStr : documentSubTypesCount.keySet()) {
      statsAccess.incrementDocSubTypeUsage(
          Integer.valueOf(docSubTypeStr), org, documentSubTypesCount.get(docSubTypeStr));
    }

    // Announce the newly published documents via the RSS/Atom feed.
    createFeedEntryAboutBatch(feedData);
  }