private void resolveRoots(ModelTreeNode modelTreeNode) throws IOException {
    ZdoModel model = modelTreeNode.getModel();
    // Get the KDR version of this doc
    String kdrUrl = model.get(ZdoTerms.kdrObject);
    ZdoModel kdrDoc = store.get(kdrUrl);
    // If there is also a published version of this doc, find it, unpublish it and adopt all its children
    String oldModelUrl = kdrDoc.get(ZdoTerms.newestPublished);
    if (oldModelUrl != null) {
        // Mark the old published doc as unpublished
        ZdoModel oldModel = store.get(oldModelUrl);
        if (ZdoGroup.ZDO.name().equals(oldModel.get(ZdoTerms.group))) {
            markAsUnpublished(oldModel, kdrDoc);
            store.update(oldModel);
        }
    }
    resolveChildren(modelTreeNode, oldModelUrl);
    // Tell the KDR doc that this will be its newest published version
    kdrDoc.replaceValueOfProperty(
            ZdoTerms.newestPublished, store.removeTransactionFromUrl(model.getUrl()));
    store.update(kdrDoc);
}
private void resolveChildren(ModelTreeNode modelTreeNode, String oldModelUrl) throws IOException {
    ZdoModel model = modelTreeNode.getModel();
    // If there is also a published version of this doc, find it and adopt all its children
    if (oldModelUrl != null) {
        // Change its children to be ours
        String queryString =
                "SELECT ?subject WHERE {\n"
                        + "  ?subject <" + DCTerms.isPartOf.getURI() + "> <" + oldModelUrl + ">.\n"
                        + "}";
        QueryExecution queryExecution =
                QueryExecutionFactory.sparqlService(SPARQL_ENDPOINT, queryString);
        ResultSet resultSet = queryExecution.execSelect();
        while (resultSet.hasNext()) {
            QuerySolution querySolution = resultSet.next();
            RDFNode childNode = querySolution.get("subject");
            String childToAdoptUrl = childNode.asResource().getURI();
            ZdoModel childModel = store.get(childToAdoptUrl);
            childModel.replaceValueOfProperty(
                    DCTerms.isPartOf, store.removeTransactionFromUrl(model.getUrl()));
            // If this child was published
            if (ZdoGroup.ZDO.name().equals(childModel.get(ZdoTerms.group))) {
                // Is this child getting replaced by a newer version?
                if (modelTreeNodeKdrIndex.containsKey(childModel.get(ZdoTerms.kdrObject))) {
                    // Yes, unpublish it
                    ZdoModel childKdrObject = store.get(childModel.get(ZdoTerms.kdrObject));
                    markAsUnpublished(childModel, childKdrObject);
                    store.update(childKdrObject);
                } else {
                    // No, it should be added to our tree and indexed in Solr
                    ModelTreeNode childModelTreeNode = new ModelTreeNode();
                    childModelTreeNode.setModel(childModel);
                    modelTreeNodeKdrIndex.put(childModel.get(ZdoTerms.kdrObject), childModelTreeNode);
                    // Index the adopted child under its own node, mirroring addNodeToStructure
                    modelTreeNodeIndex.put(
                            store.removeTransactionFromUrl(childModel.getUrl()), childModelTreeNode);
                    modelTreeNode.getChildren().add(childModelTreeNode);
                }
            }
            store.update(childModel);
        }
    }
    // Recurse on children
    for (ModelTreeNode childNode : modelTreeNode.getChildren()) {
        // Get the KDR version of this doc
        ZdoModel kdrDoc = store.get(childNode.getModel().get(ZdoTerms.kdrObject));
        resolveChildren(childNode, kdrDoc.get(ZdoTerms.newestPublished));
        kdrDoc.replaceValueOfProperty(
                ZdoTerms.newestPublished,
                store.removeTransactionFromUrl(childNode.getModel().getUrl()));
        store.update(kdrDoc);
    }
}
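// For illustration only: with a hypothetical oldModelUrl of "http://fedora/rest/old-doc", the
// string concatenation in resolveChildren above produces the following SPARQL query, which finds
// every resource whose dcterms:isPartOf points at the document version being replaced:
//
//   SELECT ?subject WHERE {
//     ?subject <http://purl.org/dc/terms/isPartOf> <http://fedora/rest/old-doc>.
//   }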
public void deleteImage(int articleId) throws IOException {
    Article article = em.find(Article.class, articleId);
    if (article == null) {
        throw new RuntimeException("No such article");
    }
    if (!article.getOwner().equals(plAccess.getUser().getLoginName())) {
        throw new RuntimeException("The user has no such article.");
    }
    String oldImageId = article.getImageId();
    if (oldImageId != null) {
        store.delete(store.createUrl(oldImageId));
    }
    article.setImageId(null);
}
public void uploadImage(int articleId, MultipartFormDataInput input) throws IOException {
    Article article = em.find(Article.class, articleId);
    if (article == null) {
        throw new RuntimeException("No such article");
    }
    if (!article.getOwner().equals(plAccess.getUser().getLoginName())) {
        throw new RuntimeException("The user has no such article.");
    }
    String oldImageId = article.getImageId();
    if (oldImageId != null) {
        store.delete(store.createUrl(oldImageId));
    }
    String uuid = imgUploadTools.uploadedFileToFedora(input, ZdoFileType.articleImage);
    article.setImageId(uuid);
}
private ModelTreeNode addNodeToStructure(ZdoModel model) throws IOException {
    // Was this node already parsed?
    ModelTreeNode modelTreeNode =
            modelTreeNodeIndex.get(store.removeTransactionFromUrl(model.getUrl()));
    if (modelTreeNode != null) {
        return modelTreeNode;
    }
    // Can it be published?
    if (!"true".equals(model.get(ZdoTerms.validToPublish))) {
        throw new RuntimeException("This model is not ready for publishing!");
    }
    // Create tree structure with model for Solr
    modelTreeNode = new ModelTreeNode();
    modelTreeNode.setModel(model);
    modelTreeNodeIndex.put(store.removeTransactionFromUrl(model.getUrl()), modelTreeNode);
    modelTreeNodeKdrIndex.put(model.get(ZdoTerms.kdrObject), modelTreeNode);
    // Parse ancestors and add us as their children
    String parentUrl = model.get(DCTerms.isPartOf);
    if (parentUrl != null) {
        ZdoModel parent = store.get(parentUrl);
        ModelTreeNode parentNode = addNodeToStructure(parent);
        parentNode.getChildren().add(modelTreeNode);
    } else {
        // This is a root
        dataToIndex.add(modelTreeNode);
    }
    // Published periodicals and volumes shouldn't keep a batch id
    if (!ZdoType.isBranchEndCategory(model.get(ZdoTerms.zdoType))) {
        model.removeAllValuesOfProperty(ZdoTerms.batchId);
    }
    // Mark doc as published
    model.replaceValueOfProperty(ZdoTerms.group, ZdoGroup.ZDO.name());
    store.update(model);
    // Leaf children like page and binary don't need to be changed, but images must be tiled
    if (ZdoType.isBranchEndCategory(model.get(ZdoTerms.zdoType))) {
        imagesToTile.addAll(
                triplestoreStuff.fetchUrlsOfTileableImageDescendants(model.get(ZdoTerms.kdrObject)));
    }
    return modelTreeNode;
}
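// The ModelTreeNode class itself is not part of this listing. Below is a minimal sketch of what
// the methods above assume: a wrapped ZdoModel plus a mutable list of child nodes, built
// bottom-up in addNodeToStructure and walked top-down in resolveRoots/resolveChildren. Field
// names are illustrative; the project's real class may differ.
import java.util.ArrayList;
import java.util.List;

public class ModelTreeNode {
    private ZdoModel model;                                         // the document version this node wraps
    private final List<ModelTreeNode> children = new ArrayList<>(); // nodes whose isPartOf points here

    public ZdoModel getModel() {
        return model;
    }

    public void setModel(ZdoModel model) {
        this.model = model;
    }

    public List<ModelTreeNode> getChildren() {
        return children;
    }
}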
// We can't wait for this because the Fedora transaction would time out
public void process(String url) {
    try {
        if (url == null || outputFolder == null) {
            logger.error("Missing source url or output folder.");
            return;
        }
        File targetFolder =
                new File(outputFolder, dataStore.createDeepPath(dataStore.getOnlyIdFromUrl(url)));
        if (targetFolder.exists()) {
            logger.debug("Image is already tiled, skipping.");
            return;
        }
        ZdoModel metadata = dataStore.get(url + "/fcr:metadata");
        if (metadata == null) {
            throw new IOException("Image metadata does not exist.");
        }
        ImageType type;
        switch (metadata.get(ZdoTerms.mimeType)) {
            case "image/jpeg":
                type = ImageType.JPEG;
                break;
            case "image/jp2":
                type = ImageType.JPEG2000;
                break;
            default:
                throw new IOException("Unsupported image format of '" + url + "' detected.");
        }
        try (InputStream in = new URL(url).openStream()) {
            TiledImage image = tiler.tileImage(in, type);
            imageWriter.output(image, targetFolder);
        }
    } catch (final IOException e) {
        logger.error("Error processing JMS event!", e);
    }
}
public List<ZdoModel> parse(final CollectionType collectionType, final String orgShortcut) {
    if (collectionType.getRecords().size() != 1) {
        throw new RuntimeException("Marc collection must contain exactly one record.");
    }
    RecordType record = collectionType.getRecords().get(0);
    MarcToDcConverter marcToDcConverter = new MarcToDcConverter();
    ZdoModel zdoModel = new ZdoModel(store.createUrl(UUID.randomUUID().toString()));
    marcToDcConverter.convert(record, orgShortcut, zdoModel);
    // String inventoryId = zdoModel.get(ZdoTerms.inventoryId);
    List<ZdoModel> resultList = new ArrayList<>();
    resultList.add(zdoModel);
    return resultList;
}
private void markAsUnpublished(ZdoModel model, ZdoModel kdrModel) throws IOException {
    if (ZdoGroup.ZDO.name().equals(model.get(ZdoTerms.group))) {
        model.replaceValueOfProperty(ZdoTerms.group, ZdoGroup.UNPUBLISHED.name());
        // Unlock the KDR object
        kdrModel.decreaseLockCount();
        String pdfUrl = kdrModel.get(ZdoTerms.pdfUrl);
        String epubUrl = kdrModel.get(ZdoTerms.epubUrl);
        if (pdfUrl != null) {
            store.delete(pdfUrl);
            kdrModel.removeAllValuesOfProperty(ZdoTerms.pdfUrl);
        }
        if (epubUrl != null) {
            store.delete(epubUrl);
            kdrModel.removeAllValuesOfProperty(ZdoTerms.epubUrl);
        }
        // And unindex the originally published doc
        if (ZdoType.isRootCategory(model.get(ZdoTerms.zdoType))) {
            rootsToDelete.add(store.getOnlyIdFromUrl(model.getUrl()));
        }
    }
}
@Transactional
public void publishBatch(int batchId) throws IOException {
    if (userUnderWhichToPublish == null) {
        userUnderWhichToPublish = plAccess.getUser().getLoginName();
    }
    ZdoBatch batch = em.find(ZdoBatch.class, batchId);
    if (batch == null
            || batch.isDeleted() /*todo || !batch.getOwner().equals(identity.getAccount().getId())*/) {
        throw new RuntimeException("Bad batch id to publish.");
    }
    if (!batch.getState().equals(ZdoBatch.BatchState.unfinished)) {
        throw new RuntimeException("Can't publish a discarded or already published batch.");
    }
    // Statistics collectors
    Map<String, Integer> zdoTypesCount = new HashMap<>();
    Map<String, Integer> documentTypesCount = new HashMap<>();
    Map<String, Integer> documentSubTypesCount = new HashMap<>();
    rootsToDelete = new ArrayList<>();
    imagesToTile = new ArrayList<>();
    modelTreeNodeIndex = new HashMap<>();
    modelTreeNodeKdrIndex = new HashMap<>();
    dataToIndex = new ArrayList<>();
    List<FeedDataHolder> feedData = new ArrayList<>(); // Rss and Atom feed data about newly published documents

    // Construct a forest from models that need to be published - bottom to top
    for (String docId : batch.getDocuments()) {
        ZdoModel model = store.get(store.createUrl(docId));
        storeStatistic(model, ZdoTerms.zdoType, zdoTypesCount);
        feedData.add(createFeedData(model));
        addNodeToStructure(model);
    }

    // Store stats - doctypes of root models
    for (ModelTreeNode modelTreeNode : dataToIndex) {
        ZdoModel model = modelTreeNode.getModel();
        // Periodicals are not counted when end branch docs are, so we must count them now
        if (ZdoType.periodical.name().equals(model.get(ZdoTerms.zdoType))) {
            storeStatistic(model, ZdoTerms.zdoType, zdoTypesCount);
        }
        storeStatistic(model, ZdoTerms.documentType, documentTypesCount);
        storeStatistic(model, ZdoTerms.documentSubType, documentSubTypesCount);
    }

    // Unpublish documents being replaced and adopt their children - top to bottom
    for (ModelTreeNode modelTreeNode : dataToIndex) {
        resolveRoots(modelTreeNode);
    }

    // After all is done in Fedora, index it in Solr
    rootsToDelete.forEach(ExWrapper.accept(indexer::remove));
    dataToIndex.forEach(ExWrapper.accept(indexer::update));

    // Aaand, tile the images
    Response response =
            ClientBuilder.newClient()
                    .target(IP_ENDPOINT + "process")
                    .request()
                    .post(Entity.json(imagesToTile));
    if (response.getStatusInfo().getFamily() != Response.Status.Family.SUCCESSFUL) {
        throw new RuntimeException("Failed to call image processing war.");
    }

    batch.setState(ZdoBatch.BatchState.published);
    batch.setModified(LocalDateTime.now());

    // Record statistics
    String org = plAccess.getOrganizationOfUser(plAccess.getUser(userUnderWhichToPublish)).getName();
    statsAccess.incrementUserDocsPublished(userUnderWhichToPublish, batch.getNumDocs());
    statsAccess.incrementOrganizationDocsPublished(org, batch.getNumDocs());
    statsAccessCommon.documentPublished(userUnderWhichToPublish, org, batch.getNumDocs());
    for (String zdoTypeStr : zdoTypesCount.keySet()) {
        statsAccess.incrementZdoTypeUsage(
                ZdoType.valueOf(zdoTypeStr), org, zdoTypesCount.get(zdoTypeStr));
    }
    for (String docTypeStr : documentTypesCount.keySet()) {
        statsAccess.incrementDocTypeUsage(
                Integer.valueOf(docTypeStr), org, documentTypesCount.get(docTypeStr));
    }
    for (String docSubTypeStr : documentSubTypesCount.keySet()) {
        statsAccess.incrementDocSubTypeUsage(
                Integer.valueOf(docSubTypeStr), org, documentSubTypesCount.get(docSubTypeStr));
    }
    createFeedEntryAboutBatch(feedData);
}
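// A minimal sketch of the storeStatistic helper used in publishBatch above. The real helper is
// not part of this listing; from its call sites it only needs to read one property from the
// model and bump a per-value counter. It assumes the ZdoTerms constants are Jena Property
// instances, like DCTerms.isPartOf used elsewhere in this code.
// Assumed imports: java.util.Map, org.apache.jena.rdf.model.Property
private void storeStatistic(ZdoModel model, Property property, Map<String, Integer> counts) {
    String value = model.get(property);
    if (value == null) {
        return; // documents without the property contribute nothing to the stats
    }
    counts.merge(value, 1, Integer::sum); // increment the counter for this value
}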