Example #1
  /*
   * @see org.alfresco.repo.content.ContentStore#getWriter(org.alfresco.repo.content.ContentContext)
   */
  @Override
  public ContentWriter getWriter(final ContentContext context) {
    if (cacheOnInbound) {
      final ContentWriter bsWriter = backingStore.getWriter(context);

      // write to cache
      final ContentWriter cacheWriter = cache.getWriter(bsWriter.getContentUrl());

      cacheWriter.addListener(
          new ContentStreamListener() {
            @Override
            public void contentStreamClosed() throws ContentIOException {
              // Finished writing to the cache, so copy to the backing store -
              // ensuring that the encoding attributes are set to the same as for the cache writer.
              bsWriter.setEncoding(cacheWriter.getEncoding());
              bsWriter.setLocale(cacheWriter.getLocale());
              bsWriter.setMimetype(cacheWriter.getMimetype());
              bsWriter.putContent(cacheWriter.getReader());
            }
          });

      return cacheWriter;
    } else {
      // No need to invalidate the cache for this content URL, since a content URL
      // is only ever written to once.
      return backingStore.getWriter(context);
    }
  }
  @Override
  public NodeRef createNode(
      Resource resource,
      RepositoryLocation targetLocation,
      String encoding,
      String mimetype,
      QName nodeType)
      throws IOException {
    NodeRef rootNode = nodeService.getRootNode(targetLocation.getStoreRef());
    final List<NodeRef> parentNodes =
        searchService.selectNodes(
            rootNode, targetLocation.getPath(), null, namespaceService, false);
    Assert.isTrue(parentNodes.size() == 1, "Target location did not resolve to exactly one node reference");

    final String fileName = resource.getFilename();
    final FileInfo fileInfo = fileFolderService.create(parentNodes.get(0), fileName, nodeType);
    final NodeRef nodeRef = fileInfo.getNodeRef();

    final ContentWriter writer = contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
    writer.putContent(resource.getInputStream());

    if (mimetype == null) {
      mimetype = guessMimetype(resource);
    }

    if (encoding == null) {
      encoding = guessEncoding(resource.getInputStream(), mimetype);
    }

    writer.setMimetype(mimetype);
    writer.setEncoding(encoding);

    return nodeRef;
  }
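
Note: this example applies the mimetype and encoding to the writer only after putContent(...) has run, whereas most of the other examples in this listing describe the content before writing it. A minimal sketch of that more common ordering, using only calls that already appear in these examples (nodeRef and contentService are assumed to be in scope):

  ContentWriter writer = contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
  writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN); // describe the content first
  writer.setEncoding("UTF-8");
  writer.putContent("example text");                   // then stream the content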
Example #3
  /** Saves any changes made to the content data */
  /*package*/ void onSave() {
    if (this.isDirty == true) {
      if (logger.isDebugEnabled() == true) {
        logger.debug(
            "ContentData.onSave() - Getting content writer (node="
                + this.node.getId()
                + "; property="
                + this.property);
      }

      // Get the content writer
      ContentWriter contentWriter =
          this.contentService.getWriter(
              this.node.getNodeRef(), QName.createQName(this.property), true);
      if (contentWriter == null) {
        throw new PHPProcessorException(
            "Unable to get content writer for property "
                + this.property
                + " on node "
                + this.node.toString());
      }

      if (logger.isDebugEnabled() == true) {
        logger.debug(
            "ContentData.onSave() - Setting encoding and mimetype (node="
                + this.node.getId()
                + "; property="
                + this.property);
      }

      // Set the encoding and mimetype
      contentWriter.setEncoding(this.encoding);
      contentWriter.setMimetype(this.mimetype);

      // Put the updated content, if it has been updated
      if (this.updatedContentString != null) {
        if (logger.isDebugEnabled() == true) {
          logger.debug(
              "ContentData.onSave() - Putting text content (node="
                  + this.node.getId()
                  + "; property="
                  + this.property);
          logger.debug("ContentData.onSave() - updatedContentString=" + this.updatedContentString);
        }

        contentWriter.putContent(this.updatedContentString);
      } else if (this.updatedContentLocation != null) {
        // TODO .. handle loading content from the file location ....
      }

      // Clear the content data since the content has now been updated
      this.updatedContentLocation = null;
      this.updatedContentString = null;
      this.isDirty = false;
    }
  }
  public void writeContent(String path, InputStream content) {
    PersistenceManagerService persistenceManagerService =
        _servicesManager.getService(PersistenceManagerService.class);
    // NodeRef nodeRef = persistenceManagerService.getNodeRef(path);

    // if (nodeRef != null) {
    ContentWriter writer = persistenceManagerService.getWriter(path);
    writer.putContent(content);
    // }
  }
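
The commented-out lines in writeContent suggest an existence check that was dropped. A hedged sketch of the guarded variant, assuming (as the commented code implies) that persistenceManagerService.getNodeRef(path) returns null for a missing path:

  public void writeContent(String path, InputStream content) {
    PersistenceManagerService persistenceManagerService =
        _servicesManager.getService(PersistenceManagerService.class);
    // Hypothetical guard, restored from the commented-out lines above.
    NodeRef nodeRef = persistenceManagerService.getNodeRef(path);
    if (nodeRef != null) {
      ContentWriter writer = persistenceManagerService.getWriter(path);
      writer.putContent(content);
    }
  }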
Example #5
  @Override
  protected void persistNode(NodeRef nodeRef, FormData data) {

    // let superclass persist properties
    super.persistNode(nodeRef, data);

    // Implement file field persistence
    int fileFieldCount = 0;
    for (FieldData fieldData : data) {
      // NOTE: only the first file field is processed; multi-file upload is not supported yet
      if (fieldData.isFile() == true && fieldData instanceof CustomFormData.FieldData) {
        CustomFormData.FieldData cfd = (CustomFormData.FieldData) fieldData;
        if (fileFieldCount == 0) {
          InputStream inputStream = cfd.getInputStream();
          try {
            if (inputStream.available() > 0) {

              ContentWriter writer =
                  this.contentService.getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
              String mimetype = cfd.getMimetype();

              logger.debug("write content in :" + nodeRef);
              logger.debug("mimeType :" + mimetype);
              logger.debug("encoding :" + writer.getEncoding());

              writer.setMimetype(mimetype);
              writer.putContent(inputStream);
            }
          } catch (InvalidTypeException e1) {
            logger.error(e1);
          } catch (ContentIOException e1) {
            logger.error(e1);
          } catch (InvalidNodeRefException e1) {
            logger.error(e1);
          } catch (IOException e1) {
            logger.error(e1);
          } finally {
            try {
              inputStream.close();
            } catch (IOException e) {
              logger.error("trying to close inputStream fail", e);
            }
          }

        } else {
          // TODO multi file upload not implemented yet
        }
        fileFieldCount++;
      }
    }
  }
Example #6
  private NodeRef createDocument(String documentName, String documentContent, String mimetype) {
    NodeRef textDocument =
        fileFolderService
            .create(rootNodeRef, "TEXT" + documentName, ContentModel.TYPE_CONTENT)
            .getNodeRef();
    ContentWriter contentWriter = fileFolderService.getWriter(textDocument);
    contentWriter.setEncoding("UTF-8");
    contentWriter.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
    contentWriter.setLocale(Locale.ENGLISH);
    contentWriter.putContent(documentContent);
    ContentReader contentReader = fileFolderService.getReader(textDocument);
    // contentReader will not be null as an exception will have been thrown if there was a problem

    NodeRef document =
        fileFolderService.create(rootNodeRef, documentName, ContentModel.TYPE_CONTENT).getNodeRef();
    contentWriter = fileFolderService.getWriter(document);
    contentWriter.setEncoding("UTF-8");
    contentWriter.setMimetype(mimetype);
    contentWriter.setLocale(Locale.ENGLISH);

    TransformationOptions options = new TransformationOptions();
    options.setSourceNodeRef(textDocument);

    if (contentService.isTransformable(contentReader, contentWriter, options)) {
      contentService.transform(contentReader, contentWriter, options);
    }

    fileFolderService.delete(textDocument);

    return document;
  }
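
When isTransformable(...) returns false, the document created above is left with no content. A hedged sketch of making that branch explicit, reusing only calls and types that already appear in this listing (failing fast is an illustrative choice, not the original behaviour):

  if (contentService.isTransformable(contentReader, contentWriter, options)) {
    contentService.transform(contentReader, contentWriter, options);
  } else {
    // Illustrative: avoid silently leaving the new document empty when no transformer is available.
    throw new AlfrescoRuntimeException("No transformer available from text/plain to " + mimetype);
  }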
Example #7
  public void testProtectedRemoval() throws Exception {
    cleaner.setProtectDays(1);
    // add some content to the store
    ContentWriter writer = store.getWriter(ContentStore.NEW_CONTENT_CONTEXT);
    writer.putContent("ABC");
    String contentUrl = writer.getContentUrl();

    // fire the cleaner
    cleaner.execute();

    // the content should have disappeared as it is not in the database
    assertTrue("Protected content was deleted", store.exists(contentUrl));
    assertFalse(
        "Content listener was called with deletion of protected URL",
        deletedUrls.contains(contentUrl));
  }
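
A hedged companion assertion for the test above: verifying that the protected content is still readable. This assumes the store exposes ContentStore#getReader(String) for the URL, which is not shown in this listing:

  ContentReader reader = store.getReader(contentUrl);
  assertEquals("Protected content should be untouched", "ABC", reader.getContentString());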
Example #8
  @Override
  public ContentWriter getWriter(final String url) {
    // Get a writer to a cache file.
    final File cacheFile = createCacheFile();
    ContentWriter writer = new FileContentWriter(cacheFile, url, null);

    // Attach a listener to populate the in-memory store when done writing.
    writer.addListener(
        new ContentStreamListener() {
          @Override
          public void contentStreamClosed() throws ContentIOException {
            recordCacheEntries(url, cacheFile);
          }
        });

    return writer;
  }
  /** Asks Tika to translate the contents into HTML */
  private void generateHTML(Parser p, RenderingContext context) {
    ContentReader contentReader = context.makeContentReader();

    // Setup things to parse with
    StringWriter sw = new StringWriter();
    ContentHandler handler = buildContentHandler(sw, context);

    // Tell Tika what we're dealing with
    Metadata metadata = new Metadata();
    metadata.set(Metadata.CONTENT_TYPE, contentReader.getMimetype());
    metadata.set(
        Metadata.RESOURCE_NAME_KEY,
        nodeService.getProperty(context.getSourceNode(), ContentModel.PROP_NAME).toString());

    // Our parse context needs to extract images
    ParseContext parseContext = new ParseContext();
    parseContext.set(Parser.class, new TikaImageExtractingParser(context));

    // Parse
    try {
      p.parse(contentReader.getContentInputStream(), handler, metadata, parseContext);
    } catch (Exception e) {
      throw new RenditionServiceException("Tika HTML Conversion Failed", e);
    }

    // As a string
    String html = sw.toString();

    // If we're doing body-only, remove all the html namespaces
    //  that will otherwise clutter up the document
    boolean bodyOnly = context.getParamWithDefault(PARAM_BODY_CONTENTS_ONLY, false);
    if (bodyOnly) {
      html = html.replaceAll("<\\?xml.*?\\?>", "");
      html = html.replaceAll("<p xmlns=\"http://www.w3.org/1999/xhtml\"", "<p");
      html = html.replaceAll("<h(\\d) xmlns=\"http://www.w3.org/1999/xhtml\"", "<h\\1");
      html = html.replaceAll("<div xmlns=\"http://www.w3.org/1999/xhtml\"", "<div");
      html = html.replaceAll("<table xmlns=\"http://www.w3.org/1999/xhtml\"", "<table");
      html = html.replaceAll("&#13;", "");
    }

    // Save it
    ContentWriter contentWriter = context.makeContentWriter();
    contentWriter.setMimetype("text/html");
    contentWriter.putContent(html);
  }
  private NodeRef createEmbeddedImage(
      NodeRef imgFolder,
      boolean primary,
      String filename,
      String contentType,
      InputStream imageSource,
      RenderingContext context) {
    // Create the node if needed
    NodeRef img = nodeService.getChildByName(imgFolder, ContentModel.ASSOC_CONTAINS, filename);
    if (img == null) {
      Map<QName, Serializable> properties = new HashMap<QName, Serializable>();
      properties.put(ContentModel.PROP_NAME, filename);
      img =
          nodeService
              .createNode(
                  imgFolder,
                  ContentModel.ASSOC_CONTAINS,
                  QName.createQName(filename),
                  ContentModel.TYPE_CONTENT,
                  properties)
              .getChildRef();
      if (logger.isDebugEnabled()) {
        logger.debug("Image node created: " + img);
      }
    }

    // TODO Once composite content is properly supported,
    //  at this point we'll associate the new image with
    //  the rendered HTML node so the dependency is tracked.

    // Put the image into the node
    ContentWriter writer = contentService.getWriter(img, ContentModel.PROP_CONTENT, true);
    writer.setMimetype(contentType);
    writer.putContent(imageSource);
    if (logger.isDebugEnabled()) {
      logger.debug("Image content written into " + img);
    }

    // All done
    return img;
  }
  /**
   * Test attachment extraction with a TNEF message
   *
   * @throws Exception
   */
  public void testAttachmentExtraction() throws Exception {
    AuthenticationUtil.setRunAsUserSystem();
    /** Load a TNEF message */
    ClassPathResource fileResource = new ClassPathResource("imap/test-tnef-message.eml");
    assertNotNull("unable to find test resource test-tnef-message.eml", fileResource);
    InputStream is = new FileInputStream(fileResource.getFile());
    MimeMessage message = new MimeMessage(Session.getDefaultInstance(new Properties()), is);

    /** Create a test node containing the message */
    String storePath = "workspace://SpacesStore";
    String companyHomePathInStore = "/app:company_home";
    StoreRef storeRef = new StoreRef(storePath);
    NodeRef storeRootNodeRef = nodeService.getRootNode(storeRef);

    List<NodeRef> nodeRefs =
        searchService.selectNodes(
            storeRootNodeRef, companyHomePathInStore, null, namespaceService, false);
    NodeRef companyHomeNodeRef = nodeRefs.get(0);

    FileInfo f1 =
        fileFolderService.create(
            companyHomeNodeRef, "ImapServiceImplTest", ContentModel.TYPE_FOLDER);
    FileInfo d2 =
        fileFolderService.create(f1.getNodeRef(), "ImapServiceImplTest", ContentModel.TYPE_FOLDER);
    FileInfo f2 =
        fileFolderService.create(
            f1.getNodeRef(), "test-tnef-message.eml", ContentModel.TYPE_CONTENT);

    ContentWriter writer = fileFolderService.getWriter(f2.getNodeRef());
    writer.putContent(new FileInputStream(fileResource.getFile()));

    NodeRef folder = imapService.extractAttachments(f1.getNodeRef(), f2.getNodeRef(), message);
    assertNotNull(folder);

    List<FileInfo> files = fileFolderService.listFiles(folder);
    assertTrue("three files not found", files.size() == 3);
  }
 /*
  * @see org.alfresco.repo.rendition.executer.AbstractRenderingEngine#render(org
  * .alfresco.service.cmr.repository.NodeRef, org.alfresco.service.cmr.rendition.RenditionDefinition,
  * org.alfresco.service.cmr.repository.ContentReader, org.alfresco.service.cmr.repository.ChildAssociationRef)
  */
 @Override
 protected void render(RenderingContext context) {
   NodeRef templateNode = getTemplateNode(context);
   Writer writer = null;
   try {
     Object model = buildModel(context);
     ContentWriter contentWriter = context.makeContentWriter();
     writer = new OutputStreamWriter(contentWriter.getContentOutputStream());
     processTemplate(context, templateNode, model, writer);
   } catch (RuntimeException ex) {
     throw ex;
   } catch (Exception ex) {
     log.warn("Unexpected error while rendering through XSLT rendering engine.", ex);
   } finally {
     if (writer != null) {
       try {
         writer.flush();
         writer.close();
       } catch (IOException ex) {
         log.warn("Failed to correctly close content writer.", ex);
       }
     }
   }
 }
Example #13
  private AlfrescoRuntimeException signFile(
      final NodeRef nodeRefToSign,
      final DigitalSigningDTO signingDTO,
      final File alfTempDir,
      final String alias,
      final KeyStore ks,
      final PrivateKey key,
      final Certificate[] chain) {
    final String fileNameToSign = fileFolderService.getFileInfo(nodeRefToSign).getName();

    File fileConverted = null;
    File tempDir = null;
    try {
      ContentReader fileToSignContentReader = getReader(nodeRefToSign);

      if (fileToSignContentReader != null) {
        String newName = null;

        // Check if document is PDF or transform it
        if (!MimetypeMap.MIMETYPE_PDF.equals(fileToSignContentReader.getMimetype())) {
          // Transform document in PDF document
          final ContentTransformer tranformer =
              contentTransformerRegistry.getTransformer(
                  fileToSignContentReader.getMimetype(),
                  fileToSignContentReader.getSize(),
                  MimetypeMap.MIMETYPE_PDF,
                  new TransformationOptions());

          if (tranformer != null) {

            tempDir = new File(alfTempDir.getPath() + File.separatorChar + nodeRefToSign.getId());
            if (tempDir != null) {
              tempDir.mkdir();
              fileConverted =
                  new File(tempDir, fileNameToSign + "_" + System.currentTimeMillis() + ".pdf");
              if (fileConverted != null) {
                final ContentWriter newDoc = new FileContentWriter(fileConverted);
                if (newDoc != null) {
                  newDoc.setMimetype(MimetypeMap.MIMETYPE_PDF);
                  tranformer.transform(fileToSignContentReader, newDoc);
                  fileToSignContentReader = new FileContentReader(fileConverted);

                  final String originalName =
                      (String) nodeService.getProperty(nodeRefToSign, ContentModel.PROP_NAME);

                  newName = originalName.substring(0, originalName.lastIndexOf(".")) + ".pdf";
                }
              }
            }
          } else {
            log.error(
                "["
                    + fileNameToSign
                    + "] No suitable converter found to convert the document in PDF.");
            return new AlfrescoRuntimeException(
                "["
                    + fileNameToSign
                    + "] No suitable converter found to convert the document in PDF.");
          }
        }

        // Convert PDF in PDF/A format
        final File pdfAFile = convertPdfToPdfA(fileToSignContentReader.getContentInputStream());

        final PdfReader reader = new PdfReader(new FileInputStream(pdfAFile));

        if (nodeRefToSign != null) {
          tempDir = new File(alfTempDir.getPath() + File.separatorChar + nodeRefToSign.getId());
          if (tempDir != null) {
            tempDir.mkdir();
            final File file = new File(tempDir, fileNameToSign);

            if (file != null) {
              final FileOutputStream fout = new FileOutputStream(file);
              final PdfStamper stp = PdfStamper.createSignature(reader, fout, '\0');

              if (stp != null) {
                final PdfSignatureAppearance sap = stp.getSignatureAppearance();
                if (sap != null) {
                  sap.setCrypto(key, chain, null, PdfSignatureAppearance.WINCER_SIGNED);
                  sap.setReason(signingDTO.getSignReason());
                  sap.setLocation(signingDTO.getSignLocation());
                  sap.setContact(signingDTO.getSignContact());
                  sap.setCertificationLevel(PdfSignatureAppearance.CERTIFIED_NO_CHANGES_ALLOWED);
                  sap.setImageScale(1);

                  // digital signature
                  if (signingDTO.getSigningField() != null
                      && !signingDTO.getSigningField().trim().equalsIgnoreCase("")) {
                    Image img = null;
                    if (signingDTO.getImage() != null) {
                      final ContentReader imageContentReader = getReader(signingDTO.getImage());
                      final AcroFields af = reader.getAcroFields();
                      if (af != null) {
                        final List<FieldPosition> positions =
                            af.getFieldPositions(signingDTO.getSigningField());
                        if (positions != null
                            && positions.size() > 0
                            && positions.get(0) != null
                            && positions.get(0).position != null) {
                          final BufferedImage newImg =
                              scaleImage(
                                  ImageIO.read(imageContentReader.getContentInputStream()),
                                  BufferedImage.TYPE_INT_RGB,
                                  Float.valueOf(positions.get(0).position.getWidth()).intValue(),
                                  Float.valueOf(positions.get(0).position.getHeight()).intValue());
                          img = Image.getInstance(newImg, null);
                        } else {
                          log.error(
                              "["
                                  + fileNameToSign
                                  + "] The field '"
                                  + signingDTO.getSigningField()
                                  + "' doesn't exist in the document.");
                          return new AlfrescoRuntimeException(
                              "["
                                  + fileNameToSign
                                  + "] The field '"
                                  + signingDTO.getSigningField()
                                  + "' doesn't exist in the document.");
                        }
                      }
                      if (img == null) {
                        img =
                            Image.getInstance(
                                ImageIO.read(imageContentReader.getContentInputStream()), null);
                      }
                      sap.setImage(img);
                    }
                    sap.setVisibleSignature(signingDTO.getSigningField());
                  } else {
                    int pageToSign = 1;
                    if (DigitalSigningDTO.PAGE_LAST.equalsIgnoreCase(
                        signingDTO.getPages().trim())) {
                      pageToSign = reader.getNumberOfPages();
                    } else if (DigitalSigningDTO.PAGE_SPECIFIC.equalsIgnoreCase(
                        signingDTO.getPages().trim())) {
                      if (signingDTO.getPageNumber() > 0
                          && signingDTO.getPageNumber() <= reader.getNumberOfPages()) {
                        pageToSign = signingDTO.getPageNumber();
                      } else {
                        throw new AlfrescoRuntimeException("Page number is out of bound.");
                      }
                    }
                    if (signingDTO.getImage() != null) {
                      final ContentReader imageContentReader = getReader(signingDTO.getImage());
                      // Resize image
                      final BufferedImage newImg =
                          scaleImage(
                              ImageIO.read(imageContentReader.getContentInputStream()),
                              BufferedImage.TYPE_INT_RGB,
                              signingDTO.getSignWidth(),
                              signingDTO.getSignHeight());
                      final Image img = Image.getInstance(newImg, null);
                      sap.setImage(img);
                    }
                    if (signingDTO.getPosition() != null
                        && !DigitalSigningDTO.POSITION_CUSTOM.equalsIgnoreCase(
                            signingDTO.getPosition().trim())) {
                      final Rectangle pageRect = reader.getPageSizeWithRotation(1);
                      sap.setVisibleSignature(
                          positionSignature(
                              signingDTO.getPosition(),
                              pageRect,
                              signingDTO.getSignWidth(),
                              signingDTO.getSignHeight(),
                              signingDTO.getxMargin(),
                              signingDTO.getyMargin()),
                          pageToSign,
                          null);
                    } else {
                      sap.setVisibleSignature(
                          new Rectangle(
                              signingDTO.getLocationX(),
                              signingDTO.getLocationY(),
                              signingDTO.getLocationX() + signingDTO.getSignWidth(),
                              signingDTO.getLocationY() - signingDTO.getSignHeight()),
                          pageToSign,
                          null);
                    }
                  }
                  stp.close();

                  NodeRef destinationNode = null;
                  NodeRef originalDoc = null;
                  boolean addAsNewVersion = false;
                  if (signingDTO.getDestinationFolder() == null) {
                    destinationNode = nodeRefToSign;
                    nodeService.addAspect(destinationNode, ContentModel.ASPECT_VERSIONABLE, null);
                    addAsNewVersion = true;
                  } else {
                    originalDoc = nodeRefToSign;
                    destinationNode =
                        createDestinationNode(
                            file.getName(), signingDTO.getDestinationFolder(), nodeRefToSign);
                  }

                  if (destinationNode != null) {

                    final ContentWriter writer =
                        contentService.getWriter(destinationNode, ContentModel.PROP_CONTENT, true);
                    if (writer != null) {
                      writer.setEncoding(fileToSignContentReader.getEncoding());
                      writer.setMimetype("application/pdf");
                      writer.putContent(file);
                      file.delete();

                      if (fileConverted != null) {
                        fileConverted.delete();
                      }

                      nodeService.addAspect(
                          destinationNode,
                          SigningModel.ASPECT_SIGNED,
                          new HashMap<QName, Serializable>());
                      nodeService.setProperty(
                          destinationNode, SigningModel.PROP_REASON, signingDTO.getSignReason());
                      nodeService.setProperty(
                          destinationNode,
                          SigningModel.PROP_LOCATION,
                          signingDTO.getSignLocation());
                      nodeService.setProperty(
                          destinationNode, SigningModel.PROP_SIGNATUREDATE, new java.util.Date());
                      nodeService.setProperty(
                          destinationNode,
                          SigningModel.PROP_SIGNEDBY,
                          AuthenticationUtil.getRunAsUser());

                      if (newName != null) {
                        nodeService.setProperty(destinationNode, ContentModel.PROP_NAME, newName);
                      }

                      final X509Certificate c = (X509Certificate) ks.getCertificate(alias);
                      nodeService.setProperty(
                          destinationNode, SigningModel.PROP_VALIDITY, c.getNotAfter());
                      nodeService.setProperty(
                          destinationNode, SigningModel.PROP_ORIGINAL_DOC, originalDoc);

                      if (!addAsNewVersion) {
                        if (!nodeService.hasAspect(originalDoc, SigningModel.ASPECT_ORIGINAL_DOC)) {
                          nodeService.addAspect(
                              originalDoc,
                              SigningModel.ASPECT_ORIGINAL_DOC,
                              new HashMap<QName, Serializable>());
                        }
                        nodeService.createAssociation(
                            originalDoc, destinationNode, SigningModel.PROP_RELATED_DOC);
                      }
                    }
                  } else {
                    log.error("[" + fileNameToSign + "] Destination node is not a valid NodeRef.");
                    return new AlfrescoRuntimeException(
                        "[" + fileNameToSign + "] Destination node is not a valid NodeRef.");
                  }
                } else {
                  log.error("[" + fileNameToSign + "] Unable to get PDF appearance signature.");
                  return new AlfrescoRuntimeException(
                      "[" + fileNameToSign + "] Unable to get PDF appearance signature.");
                }
              } else {
                log.error("[" + fileNameToSign + "] Unable to create PDF signature.");
                return new AlfrescoRuntimeException(
                    "[" + fileNameToSign + "] Unable to create PDF signature.");
              }
            }
          }
        } else {
          log.error("[" + fileNameToSign + "] Unable to get document to sign content.");
          return new AlfrescoRuntimeException(
              "[" + fileNameToSign + "] Unable to get document to sign content.");
        }

        if (pdfAFile != null) {
          pdfAFile.delete();
        }

        return null;

      } else {
        log.error("[" + fileNameToSign + "] The document has no content.");
        return new AlfrescoRuntimeException(
            "[" + fileNameToSign + "] The document has no content.");
      }
    } catch (KeyStoreException e) {
      log.error("[" + fileNameToSign + "] " + e);
      return new AlfrescoRuntimeException("[" + fileNameToSign + "] " + e.getMessage(), e);
    } catch (ContentIOException e) {
      log.error("[" + fileNameToSign + "] " + e);
      return new AlfrescoRuntimeException("[" + fileNameToSign + "] " + e.getMessage(), e);
    } catch (IOException e) {
      log.error("[" + fileNameToSign + "] " + e);
      return new AlfrescoRuntimeException("[" + fileNameToSign + "] " + e.getMessage(), e);
    } catch (DocumentException e) {
      log.error("[" + fileNameToSign + "] " + e);
      return new AlfrescoRuntimeException("[" + fileNameToSign + "] " + e.getMessage(), e);
    } finally {
      if (tempDir != null) {
        try {
          tempDir.delete();
        } catch (Exception ex) {
          log.error("[" + fileNameToSign + "] " + ex);
          return new AlfrescoRuntimeException("[" + fileNameToSign + "] " + ex.getMessage(), ex);
        }
      }
    }
  }
  /**
   * @param reader
   * @param writer
   * @param options
   * @throws Exception
   */
  protected final void action(
      Action ruleAction,
      NodeRef actionedUponNodeRef,
      ContentReader reader,
      Map<String, Object> options) {
    PDDocument pdf = null;
    InputStream is = null;
    File tempDir = null;
    ContentWriter writer = null;

    try {
      // Get the split frequency
      int splitFrequency = 0;

      String splitFrequencyString = options.get(PARAM_SPLIT_AT_PAGE).toString();
      if (!splitFrequencyString.equals("")) {
        try {
          splitFrequency = Integer.valueOf(splitFrequencyString);
        } catch (NumberFormatException e) {
          throw new AlfrescoRuntimeException(e.getMessage(), e);
        }
      }

      // Get contentReader inputStream
      is = reader.getContentInputStream();
      // stream the document in
      pdf = PDDocument.load(is);
      // split the PDF and put the pages in a list
      Splitter splitter = new Splitter();
      // Need to adjust the input value to get the split at the right page
      splitter.setSplitAtPage(splitFrequency - 1);

      // Split the pages
      List<PDDocument> pdfs = splitter.split(pdf);

      // Start page split numbering at
      int page = 1;

      // build a temp dir, name based on the ID of the noderef we are
      // importing
      File alfTempDir = TempFileProvider.getTempDir();
      tempDir = new File(alfTempDir.getPath() + File.separatorChar + actionedUponNodeRef.getId());
      tempDir.mkdir();

      // FLAG: This is ugly.....get the first PDF.
      PDDocument firstPDF = (PDDocument) pdfs.remove(0);

      int pagesInFirstPDF = firstPDF.getNumberOfPages();

      String lastPage = "";
      String pg = "_pg";

      if (pagesInFirstPDF > 1) {
        pg = "_pgs";
        lastPage = "-" + pagesInFirstPDF;
      }

      String fileNameSansExt = getFilenameSansExt(actionedUponNodeRef, FILE_EXTENSION);
      firstPDF.save(
          tempDir
              + ""
              + File.separatorChar
              + fileNameSansExt
              + pg
              + page
              + lastPage
              + FILE_EXTENSION);

      try {
        firstPDF.close();
      } catch (IOException e) {
        throw new AlfrescoRuntimeException(e.getMessage(), e);
      }

      // FLAG: Like I said: "_UGLY_" ..... and it gets worse
      PDDocument secondPDF = null;

      Iterator<PDDocument> its = pdfs.iterator();

      int pagesInSecondPDF = 0;

      while (its.hasNext()) {
        if (secondPDF != null) {
          // Get the split document and save it into the temp dir with
          // new name
          PDDocument splitpdf = (PDDocument) its.next();

          int pagesInThisPDF = splitpdf.getNumberOfPages();
          pagesInSecondPDF = pagesInSecondPDF + pagesInThisPDF;

          PDFMergerUtility merger = new PDFMergerUtility();
          merger.appendDocument(secondPDF, splitpdf);
          merger.mergeDocuments();

          try {
            splitpdf.close();
          } catch (IOException e) {
            throw new AlfrescoRuntimeException(e.getMessage(), e);
          }

        } else {
          secondPDF = (PDDocument) its.next();

          pagesInSecondPDF = secondPDF.getNumberOfPages();
        }
      }

      if (pagesInSecondPDF > 1) {

        pg = "_pgs";
        lastPage = "-" + (pagesInSecondPDF + pagesInFirstPDF);

      } else {
        pg = "_pg";
        lastPage = "";
      }

      // This is where we should save the appended PDF
      // put together the name and save the PDF
      secondPDF.save(
          tempDir
              + ""
              + File.separatorChar
              + fileNameSansExt
              + pg
              + splitFrequency
              + lastPage
              + FILE_EXTENSION);

      for (File file : tempDir.listFiles()) {
        try {
          if (file.isFile()) {
            // Get a writer and prep it for putting it back into the
            // repo
            NodeRef destinationNode =
                createDestinationNode(
                    file.getName(),
                    (NodeRef) ruleAction.getParameterValue(PARAM_DESTINATION_FOLDER),
                    actionedUponNodeRef);
            writer =
                serviceRegistry
                    .getContentService()
                    .getWriter(destinationNode, ContentModel.PROP_CONTENT, true);

            writer.setEncoding(reader.getEncoding()); // original
            // encoding
            writer.setMimetype(FILE_MIMETYPE);

            // Put it in the repo
            writer.putContent(file);

            // Clean up
            file.delete();
          }
        } catch (FileExistsException e) {
          throw new AlfrescoRuntimeException("Failed to process file.", e);
        }
      }
    } catch (COSVisitorException e) {
      throw new AlfrescoRuntimeException(e.getMessage(), e);
    } catch (IOException e) {
      throw new AlfrescoRuntimeException(e.getMessage(), e);
    } finally {
      if (pdf != null) {
        try {
          pdf.close();
        } catch (IOException e) {
          throw new AlfrescoRuntimeException(e.getMessage(), e);
        }
      }
      if (is != null) {
        try {
          is.close();
        } catch (IOException e) {
          throw new AlfrescoRuntimeException(e.getMessage(), e);
        }
      }

      if (tempDir != null) {
        tempDir.delete();
      }
    }
  }
  /** On setup in transaction implementation */
  @Override
  protected void onSetUpInTransaction() throws Exception {
    // Set the services
    this.nodeService = (NodeService) this.applicationContext.getBean("nodeService");
    this.cociService =
        (CheckOutCheckInService) this.applicationContext.getBean("checkOutCheckInService");
    this.contentService = (ContentService) this.applicationContext.getBean("contentService");
    this.versionService = (VersionService) this.applicationContext.getBean("versionService");
    this.authenticationService =
        (MutableAuthenticationService) this.applicationContext.getBean("authenticationService");
    this.lockService = (LockService) this.applicationContext.getBean("lockService");
    this.transactionService =
        (TransactionService) this.applicationContext.getBean("transactionComponent");
    this.permissionService =
        (PermissionService) this.applicationContext.getBean("permissionService");
    this.copyService = (CopyService) this.applicationContext.getBean("copyService");

    // Authenticate as system to create initial test data set
    AuthenticationComponent authenticationComponent =
        (AuthenticationComponent) this.applicationContext.getBean("authenticationComponent");
    authenticationComponent.setSystemUserAsCurrentUser();

    // Create the store and get the root node reference
    this.storeRef =
        nodeService.createStore(StoreRef.PROTOCOL_WORKSPACE, "Test_" + System.currentTimeMillis());
    this.rootNodeRef = nodeService.getRootNode(storeRef);

    // Create the node used for tests
    ChildAssociationRef childAssocRef =
        nodeService.createNode(
            rootNodeRef,
            ContentModel.ASSOC_CHILDREN,
            QName.createQName("test"),
            ContentModel.TYPE_CONTENT);
    this.nodeRef = childAssocRef.getChildRef();
    nodeService.addAspect(this.nodeRef, ContentModel.ASPECT_TITLED, null);
    nodeService.setProperty(this.nodeRef, ContentModel.PROP_NAME, TEST_VALUE_NAME);
    nodeService.setProperty(this.nodeRef, PROP2_QNAME, TEST_VALUE_2);

    // Add the initial content to the node
    ContentWriter contentWriter =
        this.contentService.getWriter(this.nodeRef, ContentModel.PROP_CONTENT, true);
    contentWriter.setMimetype("text/plain");
    contentWriter.setEncoding("UTF-8");
    contentWriter.putContent(CONTENT_1);

    // Add the lock and version aspects to the created node
    nodeService.addAspect(this.nodeRef, ContentModel.ASPECT_VERSIONABLE, null);
    nodeService.addAspect(this.nodeRef, ContentModel.ASPECT_LOCKABLE, null);

    // Create and authenticate the user
    this.userName = "******" + GUID.generate();
    TestWithUserUtils.createUser(
        this.userName, PWD, this.rootNodeRef, this.nodeService, this.authenticationService);
    TestWithUserUtils.authenticateUser(
        this.userName, PWD, this.rootNodeRef, this.authenticationService);
    this.userNodeRef = TestWithUserUtils.getCurrentUser(this.authenticationService);

    permissionService.setPermission(
        this.rootNodeRef, this.userName, PermissionService.ALL_PERMISSIONS, true);
    permissionService.setPermission(
        this.nodeRef, this.userName, PermissionService.ALL_PERMISSIONS, true);

    folderNodeRef =
        nodeService
            .createNode(
                rootNodeRef,
                ContentModel.ASSOC_CHILDREN,
                QName.createQName("test"),
                ContentModel.TYPE_FOLDER,
                Collections.<QName, Serializable>singletonMap(ContentModel.PROP_NAME, "folder"))
            .getChildRef();
    fileNodeRef =
        nodeService
            .createNode(
                folderNodeRef,
                ContentModel.ASSOC_CONTAINS,
                QName.createQName("test"),
                ContentModel.TYPE_CONTENT,
                Collections.<QName, Serializable>singletonMap(ContentModel.PROP_NAME, "file"))
            .getChildRef();
    contentWriter = this.contentService.getWriter(fileNodeRef, ContentModel.PROP_CONTENT, true);
    contentWriter.setMimetype("text/plain");
    contentWriter.setEncoding("UTF-8");
    contentWriter.putContent(CONTENT_1);
  }
Example #16
  /** Upload a Base64-encoded file to the repository and decode it once it has been uploaded. */
  @Override
  protected Map<String, Object> executeImpl(WebScriptRequest req, Status status, Cache cache) {
    HashMap<String, Object> model = new HashMap<String, Object>();
    UserTransaction trx = serviceRegistry.getTransactionService().getUserTransaction();

    try {
      trx.begin();
      System.out.println(trx.hashCode());
      Element args = Arguments.getArguments(req);
      String username =
          args.getElementsByTagName(FORM_PARAM_USERNAME).item(0).getFirstChild().getNodeValue();

      model.put(FTL_USERNAME, username);
      Impersonate.impersonate(username);

      String ref = DocumentUtils.pujarDocumentBase64(req, args, username).trim();
      NodeRef nodeRef = new NodeRef(ref);

      Map<QName, Serializable> props = serviceRegistry.getNodeService().getProperties(nodeRef);
      ContentReader reader =
          serviceRegistry.getContentService().getReader(nodeRef, ContentModel.PROP_CONTENT);
      byte[] contentDecoded =
          es.mityc.firmaJava.libreria.utilidades.Base64.decode(reader.getContentString());
      ContentWriter writer =
          serviceRegistry.getContentService().getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
      writer.putContent(new ByteArrayInputStream(contentDecoded));

      serviceRegistry.getOwnableService().setOwner(nodeRef, username);

      Context cx = Context.enter();
      Scriptable scope = cx.initStandardObjects();
      ScriptNode document = new ScriptNode(nodeRef, serviceRegistry, scope);

      model.put(FTL_DOCUMENT, document);
      model.put(FTL_SUCCESS, String.valueOf(true));

      // Audit document creation
      String type = document.getTypeShort();
      String site = document.getSiteShortName();
      if (type.equals("udl:documentSimple")) {
        AuditUdl.auditRecord(
            auditComponent,
            username,
            document.getNodeRef().toString(),
            AUDIT_ACTION_CREATE_DOCUMENT_SIMPLE,
            type,
            site);
        QName qNameIdDocSimple =
            QName.createQName(
                "http://www.smile.com/model/udl/1.0", "secuencial_identificador_documentSimple");
        String idDocSimple =
            (String) serviceRegistry.getNodeService().getProperty(nodeRef, qNameIdDocSimple);

        if ("".equals(idDocSimple) || idDocSimple == null) {
          // serviceRegistry.getNodeService().deleteNode(nodeRef);
          throw new Exception("Error obtenint identificador via WebService.");
        }
      }

      trx.commit();

    } catch (Exception e) {
      e.printStackTrace();
      model.put(FTL_ERROR, e.getMessage());
      model.put(FTL_SUCCESS, String.valueOf(false));

      try {
        if (trx.getStatus() == javax.transaction.Status.STATUS_ACTIVE) {
          System.out.println(trx.hashCode());
          trx.rollback();
        }
      } catch (SystemException ex) {
        e.printStackTrace();
      }
    }

    return model;
  }
Example #17
  /**
   * Write exception transfer report
   *
   * @return NodeRef the node ref of the new transfer report
   */
  public NodeRef createTransferReport(
      Exception e,
      TransferTarget target,
      TransferDefinition definition,
      List<TransferEvent> events,
      File snapshotFile) {
    Map<QName, Serializable> properties = new HashMap<QName, Serializable>();

    SimpleDateFormat format = new SimpleDateFormat("yyyyMMddhhmmssSSSZ");
    String timeNow = format.format(new Date());

    String title = "Transfer report, error,  " + timeNow;
    String description = "Transfer error report";
    String name = "Transfer error report, " + timeNow;

    properties.put(ContentModel.PROP_NAME, name);
    properties.put(ContentModel.PROP_TITLE, title);
    properties.put(ContentModel.PROP_DESCRIPTION, description);
    ChildAssociationRef ref =
        nodeService.createNode(
            target.getNodeRef(),
            ContentModel.ASSOC_CONTAINS,
            QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, name),
            TransferModel.TYPE_TRANSFER_REPORT,
            properties);
    ContentWriter writer =
        contentService.getWriter(ref.getChildRef(), ContentModel.PROP_CONTENT, true);
    writer.setLocale(Locale.getDefault());
    writer.setMimetype(MimetypeMap.MIMETYPE_XML);
    writer.setEncoding(DEFAULT_ENCODING);

    //
    XMLTransferReportWriter reportWriter = new XMLTransferReportWriter();

    BufferedWriter bufferedWriter =
        new BufferedWriter(new OutputStreamWriter(writer.getContentOutputStream()));

    try {
      reportWriter.startTransferReport(DEFAULT_ENCODING, bufferedWriter);

      // Header
      reportWriter.writeTarget(target);

      reportWriter.writeDefinition(definition);

      reportWriter.writeException(e);

      // Detail
      reportWriter.writeTransferEvents(events);

      reportWriter.endTransferReport();

      return ref.getChildRef();
    } catch (SAXException se) {
      return null;
    } finally {
      try {
        bufferedWriter.close();
      } catch (IOException error) {
        error.printStackTrace();
      }
    }
  }
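
Note that new OutputStreamWriter(writer.getContentOutputStream()) above encodes with the platform default charset, even though the writer's encoding was set to DEFAULT_ENCODING. A hedged sketch of passing the charset explicitly (the same point applies to the next example):

  BufferedWriter bufferedWriter =
      new BufferedWriter(
          new OutputStreamWriter(
              writer.getContentOutputStream(), java.nio.charset.Charset.forName(DEFAULT_ENCODING)));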
Example #18
  /**
   * Create a new transfer report of success
   *
   * @return NodeRef the node ref of the new transfer report
   */
  public NodeRef createTransferReport(
      Transfer transfer,
      TransferTarget target,
      TransferDefinition definition,
      List<TransferEvent> events,
      File snapshotFile) {
    Map<QName, Serializable> properties = new HashMap<QName, Serializable>();

    SimpleDateFormat format = new SimpleDateFormat("yyyyMMddhhmmssSSSZ");
    String timeNow = format.format(new Date());

    String title = "Transfer report, " + timeNow + "success";
    String description = "Transfer report success targetName : " + target.getName();
    String name = "Transfer report, " + timeNow;

    properties.put(ContentModel.PROP_NAME, name);
    properties.put(ContentModel.PROP_TITLE, title);
    properties.put(ContentModel.PROP_DESCRIPTION, description);
    ChildAssociationRef ref =
        nodeService.createNode(
            target.getNodeRef(),
            ContentModel.ASSOC_CONTAINS,
            QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, name),
            TransferModel.TYPE_TRANSFER_REPORT,
            properties);
    ContentWriter writer =
        contentService.getWriter(ref.getChildRef(), ContentModel.PROP_CONTENT, true);
    writer.setLocale(Locale.getDefault());
    writer.setMimetype(MimetypeMap.MIMETYPE_XML);
    writer.setEncoding(DEFAULT_ENCODING);

    //
    final XMLTransferReportWriter reportWriter = new XMLTransferReportWriter();

    BufferedWriter bufferedWriter =
        new BufferedWriter(new OutputStreamWriter(writer.getContentOutputStream()));

    try {
      reportWriter.startTransferReport(DEFAULT_ENCODING, bufferedWriter);

      // Header
      reportWriter.writeTarget(target);

      reportWriter.writeDefinition(definition);

      /** Write the node summary details to the transfer report */
      TransferManifestProcessor processor =
          new TransferManifestProcessor() {
            public void processTransferManifestNode(TransferManifestNormalNode node) {

              try {
                reportWriter.writeNodeSummary(node);
              } catch (SAXException error) {
                error.printStackTrace();
              }
            }

            public void processTransferManifestNode(TransferManifestDeletedNode node) {
              try {
                reportWriter.writeNodeSummary(node);
              } catch (SAXException error) {
                error.printStackTrace();
              }
            }

            public void processTransferManifiestHeader(TransferManifestHeader header) {
              /* NO-OP */
            }

            public void startTransferManifest() {
              /* NO-OP */
            }

            public void endTransferManifest() {
              /* NO-OP */
            }
          };

      /** Step 3: wire up the manifest reader to a manifest processor */
      SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
      SAXParser parser;
      parser = saxParserFactory.newSAXParser();
      XMLTransferManifestReader reader = new XMLTransferManifestReader(processor);

      /** Step 4: start the magic. Give the manifest file to the manifest reader */
      try {
        parser.parse(snapshotFile, reader);
      } catch (IOException error) {
        // TODO temp code
        error.printStackTrace();
        return null;
      }

      // Detail Events
      reportWriter.writeTransferEvents(events);

      reportWriter.endTransferReport();

      return ref.getChildRef();
    } catch (SAXException se) {
      // TODO Temp code
      return null;
    } catch (ParserConfigurationException error) {
      // TODO temp code
      error.printStackTrace();
      return null;
    } finally {
      try {
        bufferedWriter.close();
      } catch (IOException error) {
        error.printStackTrace();
      }
    }
  }
  /** Test checkIn */
  public void testCheckIn() {
    NodeRef workingCopy = checkout();

    // Test standard check-in
    Map<String, Serializable> versionProperties = new HashMap<String, Serializable>();
    versionProperties.put(Version.PROP_DESCRIPTION, "This is a test version");
    cociService.checkin(workingCopy, versionProperties);

    // Test check-in with content
    NodeRef workingCopy3 = checkout();

    nodeService.setProperty(workingCopy3, PROP_NAME_QNAME, TEST_VALUE_2);
    nodeService.setProperty(workingCopy3, PROP2_QNAME, TEST_VALUE_3);
    ContentWriter tempWriter =
        this.contentService.getWriter(workingCopy3, ContentModel.PROP_CONTENT, false);
    assertNotNull(tempWriter);
    tempWriter.putContent(CONTENT_2);
    String contentUrl = tempWriter.getContentUrl();
    Map<String, Serializable> versionProperties3 = new HashMap<String, Serializable>();
    versionProperties3.put(Version.PROP_DESCRIPTION, "description");
    versionProperties3.put(VersionModel.PROP_VERSION_TYPE, VersionType.MAJOR);
    NodeRef origNodeRef = cociService.checkin(workingCopy3, versionProperties3, contentUrl, true);
    assertNotNull(origNodeRef);

    // Check the checked in content
    ContentReader contentReader =
        this.contentService.getReader(origNodeRef, ContentModel.PROP_CONTENT);
    assertNotNull(contentReader);
    assertEquals(CONTENT_2, contentReader.getContentString());

    // Check that the version history is correct
    Version version = this.versionService.getCurrentVersion(origNodeRef);
    assertNotNull(version);
    assertEquals("description", version.getDescription());
    assertEquals(VersionType.MAJOR, version.getVersionType());
    NodeRef versionNodeRef = version.getFrozenStateNodeRef();
    assertNotNull(versionNodeRef);

    // Check the versioned content
    ContentReader versionContentReader =
        this.contentService.getReader(versionNodeRef, ContentModel.PROP_CONTENT);
    assertNotNull(versionContentReader);
    assertEquals(CONTENT_2, versionContentReader.getContentString());

    // Check that the name is not updated during the check-in
    assertEquals(TEST_VALUE_2, nodeService.getProperty(versionNodeRef, PROP_NAME_QNAME));
    assertEquals(TEST_VALUE_2, nodeService.getProperty(origNodeRef, PROP_NAME_QNAME));

    // Check that the other properties are updated during the check-in
    assertEquals(TEST_VALUE_3, nodeService.getProperty(versionNodeRef, PROP2_QNAME));
    assertEquals(TEST_VALUE_3, nodeService.getProperty(origNodeRef, PROP2_QNAME));

    // Cancel the check-out after it has been left checked out
    cociService.cancelCheckout(workingCopy3);

    // Test keep checked out flag
    NodeRef workingCopy2 = checkout();
    Map<String, Serializable> versionProperties2 = new HashMap<String, Serializable>();
    versionProperties2.put(Version.PROP_DESCRIPTION, "Another version test");
    this.cociService.checkin(workingCopy2, versionProperties2, null, true);
    this.cociService.checkin(workingCopy2, new HashMap<String, Serializable>(), null, true);
  }
  /**
   * @see
   *     org.alfresco.repo.content.transform.AbstractContentTransformer2#transformInternal(org.alfresco.service.cmr.repository.ContentReader,
   *     org.alfresco.service.cmr.repository.ContentWriter,
   *     org.alfresco.service.cmr.repository.TransformationOptions)
   */
  @Override
  public void transformInternal(
      ContentReader reader, ContentWriter writer, TransformationOptions options) throws Exception {
    final String outputMimetype = writer.getMimetype();
    final String outputFileExt = getMimetypeService().getExtension(outputMimetype);

    // We need to keep a reference to thrown exceptions as we're going to catch them and
    // then move on to the next transformer. In the event that they all fail, we will throw
    // the final exception.
    Exception transformationException = null;

    for (int i = 0; i < transformers.size(); i++) {
      int oneBasedCount = i + 1;
      ContentTransformer transf = transformers.get(i);
      ContentWriter currentWriter = null;
      File tempFile = null;
      try {
        if (logger.isDebugEnabled()) {
          logger.debug(
              "Transformation attempt "
                  + oneBasedCount
                  + " of "
                  + transformers.size()
                  + ": "
                  + transf);
        }

        if (!transf.isTransformable(
            reader.getMimetype(), reader.getSize(), outputMimetype, options)) {
          throw new UnsupportedTransformationException(
              "Unsupported transformation: " + reader.getMimetype() + " to " + outputMimetype);
        }

        // We can't know in advance which transformer in the sequence will work - if any.
        // Therefore we can't write into the ContentWriter stream.
        // So make a temporary file writer with the current transformer name.
        tempFile =
            TempFileProvider.createTempFile(
                "FailoverTransformer_intermediate_" + transf.getClass().getSimpleName() + "_",
                "." + outputFileExt);
        currentWriter = new FileContentWriter(tempFile);
        currentWriter.setMimetype(outputMimetype);
        currentWriter.setEncoding(writer.getEncoding());

        // attempt to transform
        transf.transform(reader, currentWriter, options);

        // TODO Could add a check for zero-length output and treat that as a failure
        // final long writtenSize = currentWriter.getSize();
      } catch (Exception are) {
        if (transformationException == null) {
          transformationException = are;
        }

        if (logger.isDebugEnabled()) {
          logger.debug("Transformation " + oneBasedCount + " was unsuccessful.");
          if (i != transformers.size() - 1) {
            // We don't log the last exception as we're going to throw it.
            logger.debug("The below exception is provided for information purposes only.", are);
          }
        }

        // Set a new reader to refresh the input stream.
        reader = reader.getReader();
        // and move to the next transformer
        continue;
      }
      // No need to close input or output streams

      // At this point the current transformation was successful i.e. it did not throw an exception.

      // Now we must copy the content from the temporary file into the ContentWriter stream.
      if (tempFile != null) {
        writer.putContent(tempFile);
      }

      if (logger.isInfoEnabled()) {
        logger.info("Transformation was successful");
      }
      return;
    }
    // At this point we have tried all transformers in the sequence without apparent success.
    if (transformationException != null) {
      transformerDebug.debug("          No more transformations to failover to");
      if (logger.isDebugEnabled()) {
        logger.debug(
            "All transformations were unsuccessful. Throwing first exception.",
            transformationException);
      }
      throw transformationException;
    }
  }
  /**
   * @see
   *     org.alfresco.repo.content.transform.ContentTransformer#transform(org.alfresco.service.cmr.repository.ContentReader,
   *     org.alfresco.service.cmr.repository.ContentWriter,
   *     org.alfresco.service.cmr.repository.TransformationOptions)
   */
  public final void transform(
      ContentReader reader, ContentWriter writer, TransformationOptions options)
      throws ContentIOException {
    try {
      depth.set(depth.get() + 1);

      // begin timing
      long before = System.currentTimeMillis();

      String sourceMimetype = reader.getMimetype();
      String targetMimetype = writer.getMimetype();

      // check options map
      if (options == null) {
        options = new TransformationOptions();
      }

      try {
        if (transformerDebug.isEnabled()) {
          transformerDebug.pushTransform(
              this,
              reader.getContentUrl(),
              sourceMimetype,
              targetMimetype,
              reader.getSize(),
              options);
        }

        // Check the transformability
        checkTransformable(reader, writer, options);

        // Pass on any limits to the reader
        setReaderLimits(reader, writer, options);

        // Transform
        // MNT-12238: Upload of PPTX causes very high memory usage leading to system instability.
        // Limit the transformation to a configured number of milliseconds to avoid very high RAM
        // consumption and OOM when transforming problematic documents.
        TransformationOptionLimits limits =
            getLimits(reader.getMimetype(), writer.getMimetype(), options);

        // Guard against a missing limits object before dereferencing it; a timeout of -1 means "no limit".
        long timeoutMs = (null == limits) ? -1 : limits.getTimeoutMs();
        if (!useTimeoutThread || (-1 == timeoutMs)) {
          transformInternal(reader, writer, options);
        } else {
          Future<?> submittedTask = null;
          StreamAwareContentReaderProxy proxiedReader = new StreamAwareContentReaderProxy(reader);
          StreamAwareContentWriterProxy proxiedWriter = new StreamAwareContentWriterProxy(writer);
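          // The stream-aware proxies keep a handle on the underlying channels so that
          // releaseResources(...) can force-close them if the task times out or is interrupted.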

          try {
            submittedTask =
                getExecutorService()
                    .submit(new TransformInternalCallable(proxiedReader, proxiedWriter, options));
            submittedTask.get(timeoutMs + additionalThreadTimout, TimeUnit.MILLISECONDS);
          } catch (TimeoutException e) {
            releaseResources(submittedTask, proxiedReader, proxiedWriter);
            throw new TimeoutException("Transformation failed due to timeout limit");
          } catch (InterruptedException e) {
            releaseResources(submittedTask, proxiedReader, proxiedWriter);
            throw new InterruptedException(
                "Transformation failed because the transformation thread was interrupted");
          } catch (ExecutionException e) {
            Throwable cause = e.getCause();
            if (cause instanceof TransformInternalCallableException) {
              cause = ((TransformInternalCallableException) cause).getCause();
            }

            throw cause;
          }
        }

        // record time
        long after = System.currentTimeMillis();
        recordTime(sourceMimetype, targetMimetype, after - before);
      } catch (ContentServiceTransientException cste) {
        // A transient failure has occurred within the content transformer.
        // This should not be interpreted as a failure and therefore we should not
        // update the transformer's average time.
        if (logger.isDebugEnabled()) {
          logger.debug(
              "Transformation has been transiently declined: \n"
                  + "   reader: "
                  + reader
                  + "\n"
                  + "   writer: "
                  + writer
                  + "\n"
                  + "   options: "
                  + options
                  + "\n"
                  + "   transformer: "
                  + this);
        }
        // The finally block below will still perform tidy-up; otherwise we're done.
        // Rethrow the exception.
        throw cste;
      } catch (UnsupportedTransformationException e) {
        // Don't record an error or even the time, as this is normal in compound transformations.
        transformerDebug.debug("          Failed", e);
        throw e;
      } catch (Throwable e) {
        // Make sure that this transformation is penalised in terms of the time taken.
        // This ensures that, where transformers compete for the same transformation,
        // selection is prejudiced against transformers that tend to fail.
        long after = System.currentTimeMillis();
        recordError(sourceMimetype, targetMimetype, after - before);

        // Ask Tika to detect the document type and report back on whether
        // the claimed mime type is plausible.
        String differentType = getMimetypeService().getMimetypeIfNotMatches(reader.getReader());

        // Report the error
        if (differentType == null) {
          transformerDebug.debug("          Failed", e);
          throw new ContentIOException(
              "Content conversion failed: \n"
                  + "   reader: "
                  + reader
                  + "\n"
                  + "   writer: "
                  + writer
                  + "\n"
                  + "   options: "
                  + options.toString(false)
                  + "\n"
                  + "   limits: "
                  + getLimits(reader, writer, options),
              e);
        } else {
          transformerDebug.debug("          Failed: Mime type was '" + differentType + "'", e);

          if (this.retryTransformOnDifferentMimeType) {
            // MNT-11015 fix.
            // Set a new reader to refresh the input stream.
            reader = reader.getReader();
            // Set the MIME type actually detected by Tika on the content reader.
            reader.setMimetype(differentType);

            // Get the correct transformer for the detected MIME type and retry the
            // transformation with it.
            ContentTransformer transformer =
                this.registry.getTransformer(
                    differentType, reader.getSize(), targetMimetype, options);
            if (null != transformer) {
              transformer.transform(reader, writer, options);
            } else {
              transformerDebug.debug("          Failed", e);
              throw new ContentIOException(
                  "Content conversion failed: \n"
                      + "   reader: "
                      + reader
                      + "\n"
                      + "   writer: "
                      + writer
                      + "\n"
                      + "   options: "
                      + options.toString(false)
                      + "\n"
                      + "   limits: "
                      + getLimits(reader, writer, options)
                      + "\n"
                      + "   claimed mime type: "
                      + reader.getMimetype()
                      + "\n"
                      + "   detected mime type: "
                      + differentType
                      + "\n"
                      + "   transformer not found"
                      + "\n",
                  e);
            }
          } else {
            throw new ContentIOException(
                "Content conversion failed: \n"
                    + "   reader: "
                    + reader
                    + "\n"
                    + "   writer: "
                    + writer
                    + "\n"
                    + "   options: "
                    + options.toString(false)
                    + "\n"
                    + "   limits: "
                    + getLimits(reader, writer, options)
                    + "\n"
                    + "   claimed mime type: "
                    + reader.getMimetype()
                    + "\n"
                    + "   detected mime type: "
                    + differentType,
                e);
          }
        }
      } finally {
        transformerDebug.popTransform();

        // check that the reader and writer are both closed
        if (reader.isChannelOpen()) {
          logger.error(
              "Content reader not closed by transformer: \n"
                  + "   reader: "
                  + reader
                  + "\n"
                  + "   transformer: "
                  + this);
        }
        if (writer.isChannelOpen()) {
          logger.error(
              "Content writer not closed by transformer: \n"
                  + "   writer: "
                  + writer
                  + "\n"
                  + "   transformer: "
                  + this);
        }
      }

      // done
      if (logger.isDebugEnabled()) {
        logger.debug(
            "Completed transformation: \n"
                + "   reader: "
                + reader
                + "\n"
                + "   writer: "
                + writer
                + "\n"
                + "   options: "
                + options
                + "\n"
                + "   transformer: "
                + this);
      }
    } finally {
      depth.set(depth.get() - 1);
    }
  }
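  // A minimal, hedged sketch of the submit-then-get-with-timeout pattern used above
  // (MNT-12238). The names TimeoutGuardSketch and runWithTimeout are illustrative only
  // and are not part of Alfresco's API; java.util.concurrent.Callable, Executors,
  // ExecutorService, Future, TimeUnit and TimeoutException are assumed to be imported.
  static final class TimeoutGuardSketch {
    private static final ExecutorService EXECUTOR = Executors.newCachedThreadPool();

    static <T> T runWithTimeout(Callable<T> task, long timeoutMs) throws Exception {
      Future<T> future = EXECUTOR.submit(task);
      try {
        // Wait for the task, but give up once the configured limit is reached.
        return future.get(timeoutMs, TimeUnit.MILLISECONDS);
      } catch (TimeoutException e) {
        // Cancel the hung task so it stops consuming CPU and memory, mirroring
        // releaseResources(...) in the transformer above.
        future.cancel(true);
        throw e;
      }
    }
  }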