/**
  * Create an M2Model from a dictionary model node
  *
  * @param nodeRef the dictionary model node reference
  * @return the M2Model
  */
 public M2Model createM2Model(NodeRef nodeRef) {
   M2Model model = null;
   ContentReader contentReader = this.contentService.getReader(nodeRef, ContentModel.PROP_CONTENT);
   if (contentReader != null) {
     if (contentReader instanceof EmptyContentReader) {
       // belts-and-braces
       logger.error("Failed to create model (due to EmptyContentReader): " + nodeRef);
     } else {
       InputStream is = null;
       try {
         is = contentReader.getContentInputStream();
         model = M2Model.createModel(is);
       } finally {
         if (is != null) {
           try {
             is.close();
           } catch (IOException e) {
             logger.error("Failed to close input stream for " + nodeRef);
           }
         }
       }
     }
   }
   // TODO should we inactivate the model node and put the error somewhere??
   return model;
 }
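  // Hedged usage sketch (not from the original source): shows how a caller might load a
  // dictionary model node via createM2Model(..) and inspect the result. The method name
  // logModelName is illustrative only; it assumes the surrounding class's logger field.
  public void logModelName(NodeRef dictionaryModelNodeRef) {
    M2Model model = createM2Model(dictionaryModelNodeRef);
    if (model == null) {
      logger.warn("No model could be read from node: " + dictionaryModelNodeRef);
      return;
    }
    // The model name comes from the XML definition held in the node's content property
    logger.info("Loaded dictionary model: " + model.getName());
  }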
  @Test
  public void putForZeroLengthFile() {
    ContentReader contentReader = Mockito.mock(ContentReader.class);
    Mockito.when(contentReader.getSize()).thenReturn(0L);

    boolean putResult = contentCache.put("", contentReader);

    assertFalse("Zero length files should not be cached", putResult);
  }
 public String getPersonDescription() {
   ContentService cs =
       Repository.getServiceRegistry(FacesContext.getCurrentInstance()).getContentService();
   ContentReader reader = cs.getReader(this.person.getNodeRef(), ContentModel.PROP_PERSONDESC);
   if (reader != null && reader.exists()) {
     return Utils.stripUnsafeHTMLTags(reader.getContentString()).replace("\r\n", "<p>");
   } else {
     return null;
   }
 }
  @Override
  public boolean put(String contentUrl, ContentReader source) {
    File cacheFile = createCacheFile();

    // Copy the content from the source into a cache file
    if (source.getSize() > 0L) {
      source.getContent(cacheFile);
      // Add a record of the cached file to the in-memory cache.
      recordCacheEntries(contentUrl, cacheFile);
      return true;
    }

    return false;
  }
  public void testImmediateRemoval() throws Exception {
    eagerCleaner.setEagerOrphanCleanup(false);

    final StoreRef storeRef = nodeService.createStore("test", getName() + "-" + GUID.generate());
    RetryingTransactionCallback<ContentData> testCallback =
        new RetryingTransactionCallback<ContentData>() {
          public ContentData execute() throws Throwable {
            // Create some content
            NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
            Map<QName, Serializable> properties = new HashMap<QName, Serializable>(13);
            properties.put(ContentModel.PROP_NAME, (Serializable) "test.txt");
            NodeRef contentNodeRef =
                nodeService
                    .createNode(
                        rootNodeRef,
                        ContentModel.ASSOC_CHILDREN,
                        ContentModel.ASSOC_CHILDREN,
                        ContentModel.TYPE_CONTENT,
                        properties)
                    .getChildRef();
            ContentWriter writer =
                contentService.getWriter(contentNodeRef, ContentModel.PROP_CONTENT, true);
            writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
            writer.putContent("INITIAL CONTENT");
            ContentData contentData = writer.getContentData();

            // Delete the first node
            nodeService.deleteNode(contentNodeRef);

            // Done
            return contentData;
          }
        };
    ContentData contentData =
        transactionService.getRetryingTransactionHelper().doInTransaction(testCallback);
    // Make sure that the content URL still exists
    ContentReader reader = contentService.getRawReader(contentData.getContentUrl());
    assertNotNull(reader);
    assertTrue("Content should not have been eagerly deleted.", reader.exists());

    // fire the cleaner
    cleaner.setProtectDays(0);
    cleaner.execute();

    reader = contentService.getRawReader(contentData.getContentUrl());
    // the content should have disappeared as it is not in the database
    assertFalse("Unprotected content was not deleted", reader.exists());
    assertTrue("Content listener was not called", deletedUrls.contains(reader.getContentUrl()));
  }
  /** @return the newly checked-out working copy node reference */
  private NodeRef checkout() {
    // Check out the node
    NodeRef workingCopy =
        cociService.checkout(
            this.nodeRef,
            this.rootNodeRef,
            ContentModel.ASSOC_CHILDREN,
            QName.createQName("workingCopy"));
    assertNotNull(workingCopy);

    // System.out.println(NodeStoreInspector.dumpNodeStore(this.nodeService, this.storeRef));

    // Ensure that the working copy and copied-from aspects have been applied
    assertTrue(nodeService.hasAspect(workingCopy, ContentModel.ASPECT_WORKING_COPY));
    assertTrue(nodeService.hasAspect(workingCopy, ContentModel.ASPECT_COPIEDFROM));

    // Check that the working copy owner has been set correctly
    assertEquals(
        this.userNodeRef,
        nodeService.getProperty(workingCopy, ContentModel.PROP_WORKING_COPY_OWNER));

    // Check that the working copy name has been set correctly
    String name = (String) this.nodeService.getProperty(this.nodeRef, PROP_NAME_QNAME);
    String expectedWorkingCopyLabel = I18NUtil.getMessage("coci_service.working_copy_label");
    String expectedWorkingCopyName =
        ((CheckOutCheckInServiceImpl) this.cociService)
            .createWorkingCopyName(name, expectedWorkingCopyLabel);
    String workingCopyName = (String) this.nodeService.getProperty(workingCopy, PROP_NAME_QNAME);
    assertEquals(expectedWorkingCopyName, workingCopyName);
    // Check a record has been kept of the working copy label used to create the working copy name
    assertEquals(
        "No record of working copy label kept",
        expectedWorkingCopyLabel,
        nodeService.getProperty(workingCopy, ContentModel.PROP_WORKING_COPY_LABEL));

    // Ensure that the content has been copied correctly
    ContentReader contentReader =
        this.contentService.getReader(this.nodeRef, ContentModel.PROP_CONTENT);
    assertNotNull(contentReader);
    ContentReader contentReader2 =
        this.contentService.getReader(workingCopy, ContentModel.PROP_CONTENT);
    assertNotNull(contentReader2);
    assertEquals(
        "The content string of the working copy should match the original immediatly after checkout.",
        contentReader.getContentString(),
        contentReader2.getContentString());

    return workingCopy;
  }
  @Test
  public void putForNonEmptyFile() {
    ContentReader contentReader = Mockito.mock(ContentReader.class);
    Mockito.when(contentReader.getSize()).thenReturn(999000L);
    final String url = "store://some/url.bin";
    boolean putResult = contentCache.put(url, contentReader);

    assertTrue("Non-empty files should be cached", putResult);
    ArgumentCaptor<File> cacheFileArg = ArgumentCaptor.forClass(File.class);
    Mockito.verify(contentReader).getContent(cacheFileArg.capture());
    // Check cached item is recorded properly in ehcache
    Mockito.verify(lookupTable).put(Key.forUrl(url), cacheFileArg.getValue().getAbsolutePath());
    Mockito.verify(lookupTable)
        .put(Key.forCacheFile(cacheFileArg.getValue().getAbsolutePath()), url);
  }
 /**
  * Convenience method to check the transformability of a transformation
  *
  * @param reader content reader
  * @param writer content writer
  * @param options transformation options
  * @throws AlfrescoRuntimeException if the transformation isn't supported
  */
 protected void checkTransformable(
     ContentReader reader, ContentWriter writer, TransformationOptions options) {
   String sourceMimetype = getMimetype(reader);
   String targetMimetype = getMimetype(writer);
   long sourceSize = reader.getSize();
   boolean transformable = isTransformable(sourceMimetype, sourceSize, targetMimetype, options);
    if (!transformable) {
      // This method is only called once a transformer has been selected, so it should be able to
      // handle the mimetypes but might not be able to handle all the limits as it might be part
      // of a complex (compound) transformer. So report the max size if set.
     long maxSourceSizeKBytes = getMaxSourceSizeKBytes(sourceMimetype, targetMimetype, options);
     boolean sizeOkay =
         maxSourceSizeKBytes < 0
             || (maxSourceSizeKBytes > 0 && sourceSize <= maxSourceSizeKBytes * 1024);
     AlfrescoRuntimeException e =
         new UnsupportedTransformationException(
             "Unsupported transformation: "
                 + getBeanName()
                 + ' '
                 + sourceMimetype
                 + " to "
                 + targetMimetype
                 + ' '
                 + (sizeOkay
                     ? ""
                     : transformerDebug.fileSize(sourceSize)
                         + " > "
                         + transformerDebug.fileSize(maxSourceSizeKBytes * 1024)));
     throw transformerDebug.setCause(e);
   }
   // it all checks out OK
 }
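  // Hedged illustration (not in the original class): the size check above treats a negative
  // limit as "no limit" and a positive limit as a threshold in KBytes; a limit of exactly 0
  // therefore never passes. A standalone equivalent of that expression, for clarity:
  static boolean isSourceSizeOkay(long sourceSize, long maxSourceSizeKBytes) {
    return maxSourceSizeKBytes < 0
        || (maxSourceSizeKBytes > 0 && sourceSize <= maxSourceSizeKBytes * 1024);
  }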
  /** Asks Tika to translate the contents into HTML */
  private void generateHTML(Parser p, RenderingContext context) {
    ContentReader contentReader = context.makeContentReader();

    // Setup things to parse with
    StringWriter sw = new StringWriter();
    ContentHandler handler = buildContentHandler(sw, context);

    // Tell Tika what we're dealing with
    Metadata metadata = new Metadata();
    metadata.set(Metadata.CONTENT_TYPE, contentReader.getMimetype());
    metadata.set(
        Metadata.RESOURCE_NAME_KEY,
        nodeService.getProperty(context.getSourceNode(), ContentModel.PROP_NAME).toString());

    // Our parse context needs to extract images
    ParseContext parseContext = new ParseContext();
    parseContext.set(Parser.class, new TikaImageExtractingParser(context));

    // Parse
    try {
      p.parse(contentReader.getContentInputStream(), handler, metadata, parseContext);
    } catch (Exception e) {
      throw new RenditionServiceException("Tika HTML Conversion Failed", e);
    }

    // As a string
    String html = sw.toString();

    // If we're doing body-only, remove all the html namespaces
    //  that will otherwise clutter up the document
    boolean bodyOnly = context.getParamWithDefault(PARAM_BODY_CONTENTS_ONLY, false);
    if (bodyOnly) {
      html = html.replaceAll("<\\?xml.*?\\?>", "");
      html = html.replaceAll("<p xmlns=\"http://www.w3.org/1999/xhtml\"", "<p");
      html = html.replaceAll("<h(\\d) xmlns=\"http://www.w3.org/1999/xhtml\"", "<h\\1");
      html = html.replaceAll("<div xmlns=\"http://www.w3.org/1999/xhtml\"", "<div");
      html = html.replaceAll("<table xmlns=\"http://www.w3.org/1999/xhtml\"", "<table");
      html = html.replaceAll("&#13;", "");
    }

    // Save it
    ContentWriter contentWriter = context.makeContentWriter();
    contentWriter.setMimetype("text/html");
    contentWriter.putContent(html);
  }
  /*
   * (non-Javadoc)
   * @see org.alfresco.repo.rendition.executer.AbstractRenderingEngine#render(org.alfresco.repo.rendition.executer.AbstractRenderingEngine.RenderingContext)
   */
  @Override
  protected void render(RenderingContext context) {
    ContentReader contentReader = context.makeContentReader();
    String sourceMimeType = contentReader.getMimetype();

    // Check that Tika supports the supplied file
    AutoDetectParser p = new AutoDetectParser(tikaConfig);
    MediaType sourceMediaType = MediaType.parse(sourceMimeType);
    if (!p.getParsers().containsKey(sourceMediaType)) {
      throw new RenditionServiceException(
          "Source mime type of "
              + sourceMimeType
              + " is not supported by Tika for HTML conversions");
    }

    // Make the HTML Version using Tika
    // This will also extract out any images as found
    generateHTML(p, context);
  }
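  // Hedged helper sketch (not part of the original class): factors the Tika support check out
  // of render(..) so a caller could probe a mimetype up front. It relies only on the
  // tikaConfig field and the Tika classes already used above.
  private boolean isSupportedByTika(String mimeType) {
    AutoDetectParser parser = new AutoDetectParser(tikaConfig);
    MediaType mediaType = MediaType.parse(mimeType);
    // AutoDetectParser exposes its per-media-type parser map; absence means no support
    return parser.getParsers().containsKey(mediaType);
  }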
  /**
   * Get the content input stream for a WCM content item at the given path
   *
   * @param path the repository path of the content item
   * @return the content input stream, or <code>null</code> if the path resolves to a folder or
   *     cannot be resolved
   * @throws ServiceException
   */
  public InputStream getContent(String path) {
    InputStream retStream = null;
    PersistenceManagerService persistenceManagerService =
        _servicesManager.getService(PersistenceManagerService.class);
    NodeRef nodeRef = persistenceManagerService.getNodeRef(path);

    if (nodeRef != null) {
      FileInfo fileInfo = persistenceManagerService.getFileInfo(nodeRef);
      if (fileInfo.isFolder()) {
        logger.info(MSG_CONTENT_FOR_FOLDER_REQUESTED, path);
      } else {
        ContentReader reader = persistenceManagerService.getReader(nodeRef);
        retStream = reader.getContentInputStream();
      }
    } else {
      logger.info(MSG_NODE_REF_IS_NULL_FOR_PATH, path);
    }

    return retStream;
  }
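  // Hedged usage sketch (copyContent is not in the original class): callers of getContent(path)
  // own the returned stream and should close it. This version copies the content to an output
  // stream with Apache Commons IO, which is already used elsewhere in this codebase.
  public void copyContent(String path, OutputStream out) throws IOException {
    InputStream in = getContent(path);
    if (in == null) {
      // Path did not resolve to readable content; nothing to copy
      return;
    }
    try {
      IOUtils.copy(in, out);
    } finally {
      IOUtils.closeQuietly(in);
    }
  }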
  private JSONObject getPreferencesObject(String userName) throws JSONException {
    JSONObject jsonPrefs = null;

    // Get the user node reference
    NodeRef personNodeRef = this.personService.getPerson(userName);
    if (personNodeRef == null) {
      throw new AlfrescoRuntimeException(
          "Cannot get preferences for " + userName + " because he/she does not exist.");
    }

    String currentUserName = AuthenticationUtil.getFullyAuthenticatedUser();
    boolean isSystem =
        AuthenticationUtil.isRunAsUserTheSystemUser()
            || authenticationContext.isSystemUserName(currentUserName);
    if (isSystem
        || userName.equals(currentUserName)
        || personService
            .getUserIdentifier(userName)
            .equals(personService.getUserIdentifier(currentUserName))
        || authorityService.isAdminAuthority(currentUserName)) {
      // Check for preferences aspect
      if (this.nodeService.hasAspect(personNodeRef, ContentModel.ASPECT_PREFERENCES)) {
        // Get the preferences for this user
        ContentReader reader =
            this.contentService.getReader(personNodeRef, ContentModel.PROP_PREFERENCE_VALUES);
        if (reader != null) {
          jsonPrefs = new JSONObject(reader.getContentString());
        }
      }
    } else {
      // The current user does not have sufficient permissions to get
      // the preferences for this user
      throw new AccessDeniedException(
          "The current user "
              + currentUserName
              + " does not have sufficient permissions to get the preferences of the user "
              + userName);
    }

    return jsonPrefs;
  }
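  // Hedged usage sketch (getPreferenceValue is not in the original class): reading a single
  // preference through getPreferencesObject(..). Returns null when the user has no stored
  // preferences or the key is absent.
  public String getPreferenceValue(String userName, String preferenceName) throws JSONException {
    JSONObject jsonPrefs = getPreferencesObject(userName);
    return jsonPrefs == null ? null : jsonPrefs.optString(preferenceName, null);
  }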
  @Override
  protected String finishImpl(FacesContext context, String outcome) throws Exception {
    // Try and extract metadata from the file
    ContentReader cr = new FileContentReader(this.file);
    cr.setMimetype(this.mimeType);
    cr.setEncoding(this.encoding);
    // create properties for content type
    Map<QName, Serializable> contentProps = new HashMap<QName, Serializable>(5, 1.0f);

    if (Repository.extractMetadata(FacesContext.getCurrentInstance(), cr, contentProps)) {
      this.author = (String) (contentProps.get(ContentModel.PROP_AUTHOR));
      this.title =
          DefaultTypeConverter.INSTANCE.convert(
              String.class, contentProps.get(ContentModel.PROP_TITLE));
      this.description =
          DefaultTypeConverter.INSTANCE.convert(
              String.class, contentProps.get(ContentModel.PROP_DESCRIPTION));
    }

    // default the title to the file name if not set
    if (this.title == null) {
      this.title = this.fileName;
    }

    // determine whether inline editing should be enabled by default.
    // if the mime type of the added file is in the list of mime types
    // configured in "Content Wizards" then enable inline editing
    List<String> mimeTypes = getInlineEditableMimeTypes();
    if (mimeTypes.contains(this.mimeType)) {
      this.inlineEdit = true;
    }

    saveContent(this.file, null);

    // return default outcome
    return outcome;
  }
  /**
   * Create ContentData set it on a Node, delete the Node, then set the ContentData on a new node
   * and check that the content is preserved during eager cleanup.
   */
  public void testEagerCleanupDereferencing() throws Exception {
    eagerCleaner.setEagerOrphanCleanup(true);

    final StoreRef storeRef = nodeService.createStore("test", getName() + "-" + GUID.generate());
    RetryingTransactionCallback<ContentData> testCallback =
        new RetryingTransactionCallback<ContentData>() {
          public ContentData execute() throws Throwable {
            // Create some content
            NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
            Map<QName, Serializable> properties = new HashMap<QName, Serializable>(13);
            properties.put(ContentModel.PROP_NAME, (Serializable) "test.txt");
            NodeRef contentNodeRef =
                nodeService
                    .createNode(
                        rootNodeRef,
                        ContentModel.ASSOC_CHILDREN,
                        ContentModel.ASSOC_CHILDREN,
                        ContentModel.TYPE_CONTENT,
                        properties)
                    .getChildRef();
            ContentWriter writer =
                contentService.getWriter(contentNodeRef, ContentModel.PROP_CONTENT, true);
            writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
            writer.putContent("INITIAL CONTENT");
            ContentData contentData = writer.getContentData();

            // Delete the first node
            nodeService.deleteNode(contentNodeRef);

            ContentReader reader = contentService.getRawReader(contentData.getContentUrl());
            assertNotNull(reader);
            assertTrue("Content was cleaned before end of transaction", reader.exists());

            // Make a new copy using the same ContentData
            properties.put(ContentModel.PROP_NAME, (Serializable) "test2.txt");
            properties.put(ContentModel.PROP_CONTENT, contentData);
            contentNodeRef =
                nodeService
                    .createNode(
                        rootNodeRef,
                        ContentModel.ASSOC_CHILDREN,
                        ContentModel.ASSOC_CHILDREN,
                        ContentModel.TYPE_CONTENT,
                        properties)
                    .getChildRef();

            reader = contentService.getRawReader(contentData.getContentUrl());
            assertNotNull(reader);
            assertTrue("Content was cleaned before end of transaction", reader.exists());

            // Done
            return contentData;
          }
        };
    ContentData contentData =
        transactionService.getRetryingTransactionHelper().doInTransaction(testCallback);
    // Make sure that the content URL still exists
    ContentReader reader = contentService.getRawReader(contentData.getContentUrl());
    assertNotNull(reader);
    assertTrue(
        "Content was cleaned despite being re-referenced in the transaction", reader.exists());
  }
  @Override
  protected Map<String, Object> executeImpl(
      SiteInfo site,
      String pageTitle,
      WebScriptRequest req,
      JSONObject json,
      Status status,
      Cache cache) {
    Map<String, Object> model = new HashMap<>();

    // Grab the version string
    Map<String, String> templateVars = req.getServiceMatch().getTemplateVars();
    String versionId = templateVars.get("versionId");
    if (versionId == null) {
      String error = "No versionId supplied";
      throw new WebScriptException(Status.STATUS_BAD_REQUEST, error);
    }

    // Try to find the page
    WikiPageInfo page = wikiService.getWikiPage(site.getShortName(), pageTitle);
    if (page == null) {
      String message = "The Wiki Page could not be found";
      status.setCode(Status.STATUS_NOT_FOUND);
      status.setMessage(message);

      // Return an empty string though
      model.put(PARAM_CONTENT, "");
      return model;
    }

    // Fetch the version history for the node
    VersionHistory versionHistory = null;
    Version version = null;
    try {
      versionHistory = versionService.getVersionHistory(page.getNodeRef());
    } catch (AspectMissingException e) {
      // The page has never been versioned; leave versionHistory as null and fall through
    }

    if (versionHistory == null) {
      // Not been versioned, return an empty string
      model.put(PARAM_CONTENT, "");
      return model;
    }

    // Fetch the version by either ID or Label
    Matcher m = LABEL_PATTERN.matcher(versionId);
    if (m.matches()) {
      // It's a version label like 2.3
      try {
        version = versionHistory.getVersion(versionId);
      } catch (VersionDoesNotExistException e) {
        // No version with that label; leave version as null so an empty string is returned
      }
    } else {
      // It's a version ID like ed00bac1-f0da-4042-8598-45a0d39cb74d
      // (The ID is usually part of the NodeRef of the frozen node, but we
      //  don't assume to be able to just generate the full NodeRef)
      for (Version v : versionHistory.getAllVersions()) {
        if (v.getFrozenStateNodeRef().getId().equals(versionId)) {
          version = v;
        }
      }
    }

    // Did we find the right version in the end?
    String contents;
    if (version != null) {
      ContentReader reader =
          contentService.getReader(version.getFrozenStateNodeRef(), ContentModel.PROP_CONTENT);
      if (reader != null) {
        contents = reader.getContentString();
      } else {
        // No content was stored in the version history
        contents = "";
      }
    } else {
      // No warning of the missing version, just return an empty string
      contents = "";
    }

    // All done
    model.put(PARAM_CONTENT, contents);
    model.put("page", page);
    model.put("site", site);
    model.put("siteId", site.getShortName());
    return model;
  }
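  // Hedged sketch: a plausible definition of the LABEL_PATTERN constant referenced in
  // executeImpl(..) above. The real field is declared elsewhere in the class and may differ;
  // this version simply matches dotted numeric version labels such as "2.3".
  private static final Pattern LABEL_PATTERN = Pattern.compile("\\d+(\\.\\d+)+");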
  /**
   * Split a PDF at the configured page and write the resulting documents back to the repository.
   *
   * @param ruleAction the rule action being executed
   * @param actionedUponNodeRef the PDF node being split
   * @param reader content reader for the source PDF
   * @param options action options (split page, destination folder)
   */
  protected final void action(
      Action ruleAction,
      NodeRef actionedUponNodeRef,
      ContentReader reader,
      Map<String, Object> options) {
    PDDocument pdf = null;
    InputStream is = null;
    File tempDir = null;
    ContentWriter writer = null;

    try {
      // Get the split frequency
      int splitFrequency = 0;

      String splitFrequencyString = options.get(PARAM_SPLIT_AT_PAGE).toString();
      if (!splitFrequencyString.equals("")) {
        try {
          splitFrequency = Integer.valueOf(splitFrequencyString);
        } catch (NumberFormatException e) {
          throw new AlfrescoRuntimeException(e.getMessage(), e);
        }
      }

      // Get contentReader inputStream
      is = reader.getContentInputStream();
      // stream the document in
      pdf = PDDocument.load(is);
      // split the PDF and put the pages in a list
      Splitter splitter = new Splitter();
      // Need to adjust the input value to get the split at the right page
      splitter.setSplitAtPage(splitFrequency - 1);

      // Split the pages
      List<PDDocument> pdfs = splitter.split(pdf);

      // Start page split numbering at
      int page = 1;

      // build a temp dir, named after the ID of the noderef we are splitting
      File alfTempDir = TempFileProvider.getTempDir();
      tempDir = new File(alfTempDir.getPath() + File.separatorChar + actionedUponNodeRef.getId());
      tempDir.mkdir();

      // FLAG: This is ugly.....get the first PDF.
      PDDocument firstPDF = (PDDocument) pdfs.remove(0);

      int pagesInFirstPDF = firstPDF.getNumberOfPages();

      String lastPage = "";
      String pg = "_pg";

      if (pagesInFirstPDF > 1) {
        pg = "_pgs";
        lastPage = "-" + pagesInFirstPDF;
      }

      String fileNameSansExt = getFilenameSansExt(actionedUponNodeRef, FILE_EXTENSION);
      firstPDF.save(
          tempDir.getPath()
              + File.separatorChar
              + fileNameSansExt
              + pg
              + page
              + lastPage
              + FILE_EXTENSION);

      try {
        firstPDF.close();
      } catch (IOException e) {
        throw new AlfrescoRuntimeException(e.getMessage(), e);
      }

      // FLAG: Like I said: "_UGLY_" ..... and it gets worse
      PDDocument secondPDF = null;

      Iterator<PDDocument> its = pdfs.iterator();

      int pagesInSecondPDF = 0;

      while (its.hasNext()) {
        if (secondPDF != null) {
          // Get the split document and save it into the temp dir with
          // new name
          PDDocument splitpdf = (PDDocument) its.next();

          int pagesInThisPDF = splitpdf.getNumberOfPages();
          pagesInSecondPDF = pagesInSecondPDF + pagesInThisPDF;

          PDFMergerUtility merger = new PDFMergerUtility();
          merger.appendDocument(secondPDF, splitpdf);
          merger.mergeDocuments();

          try {
            splitpdf.close();
          } catch (IOException e) {
            throw new AlfrescoRuntimeException(e.getMessage(), e);
          }

        } else {
          secondPDF = (PDDocument) its.next();

          pagesInSecondPDF = secondPDF.getNumberOfPages();
        }
      }

      if (pagesInSecondPDF > 1) {

        pg = "_pgs";
        lastPage = "-" + (pagesInSecondPDF + pagesInFirstPDF);

      } else {
        pg = "_pg";
        lastPage = "";
      }

      // This is where we should save the appended PDF
      // put together the name and save the PDF
      secondPDF.save(
          tempDir.getPath()
              + File.separatorChar
              + fileNameSansExt
              + pg
              + splitFrequency
              + lastPage
              + FILE_EXTENSION);

      for (File file : tempDir.listFiles()) {
        try {
          if (file.isFile()) {
            // Get a writer and prep it for putting it back into the
            // repo
            NodeRef destinationNode =
                createDestinationNode(
                    file.getName(),
                    (NodeRef) ruleAction.getParameterValue(PARAM_DESTINATION_FOLDER),
                    actionedUponNodeRef);
            writer =
                serviceRegistry
                    .getContentService()
                    .getWriter(destinationNode, ContentModel.PROP_CONTENT, true);

            writer.setEncoding(reader.getEncoding()); // original encoding
            writer.setMimetype(FILE_MIMETYPE);

            // Put it in the repo
            writer.putContent(file);

            // Clean up
            file.delete();
          }
        } catch (FileExistsException e) {
          throw new AlfrescoRuntimeException("Failed to process file.", e);
        }
      }
    } catch (COSVisitorException e) {
      throw new AlfrescoRuntimeException(e.getMessage(), e);
    } catch (IOException e) {
      throw new AlfrescoRuntimeException(e.getMessage(), e);
    } finally {
      if (pdf != null) {
        try {
          pdf.close();
        } catch (IOException e) {
          throw new AlfrescoRuntimeException(e.getMessage(), e);
        }
      }
      if (is != null) {
        try {
          is.close();
        } catch (IOException e) {
          throw new AlfrescoRuntimeException(e.getMessage(), e);
        }
      }

      if (tempDir != null) {
        tempDir.delete();
      }
    }
  }
  /**
   * Stream content implementation
   *
   * @param req The request
   * @param res The response
   * @param reader The reader
   * @param nodeRef The content nodeRef if applicable
   * @param propertyQName The content property if applicable
   * @param attach Indicates whether the content should be streamed as an attachment or not
   * @param modified Modified date of content
   * @param eTag ETag to use
   * @param attachFileName Optional file name to use when attach is <code>true</code>
   * @param model Model used when setting the response cache
   * @throws IOException
   */
  public void streamContentImpl(
      WebScriptRequest req,
      WebScriptResponse res,
      ContentReader reader,
      final NodeRef nodeRef,
      final QName propertyQName,
      final boolean attach,
      final Date modified,
      String eTag,
      final String attachFileName,
      Map<String, Object> model)
      throws IOException {
    setAttachment(null, res, attach, attachFileName);

    // establish mimetype
    String mimetype = reader.getMimetype();
    String extensionPath = req.getExtensionPath();
    if (mimetype == null || mimetype.length() == 0) {
      mimetype = MimetypeMap.MIMETYPE_BINARY;
      int extIndex = extensionPath.lastIndexOf('.');
      if (extIndex != -1) {
        String ext = extensionPath.substring(extIndex + 1);
        mimetype = mimetypeService.getMimetype(ext);
      }
    }

    res.setHeader(HEADER_ACCEPT_RANGES, "bytes");
    try {
      boolean processedRange = false;
      String range = req.getHeader(HEADER_CONTENT_RANGE);
      final long size = reader.getSize();
      final String encoding = reader.getEncoding();

      if (attach) {
        final String finalMimetype = mimetype;

        eventPublisher.publishEvent(
            new EventPreparator() {
              @Override
              public Event prepareEvent(String user, String networkId, String transactionId) {
                String siteId = siteService.getSiteShortName(nodeRef);

                return new ContentEventImpl(
                    ContentEvent.DOWNLOAD,
                    user,
                    networkId,
                    transactionId,
                    nodeRef.getId(),
                    siteId,
                    propertyQName.toString(),
                    Client.webclient,
                    attachFileName,
                    finalMimetype,
                    size,
                    encoding);
              }
            });
      }

      if (range == null) {
        range = req.getHeader(HEADER_RANGE);
      }
      if (range != null) {
        if (logger.isDebugEnabled()) logger.debug("Found content range header: " + range);

        // ensure the range header starts with "bytes=" and process the range(s)
        if (range.length() > 6) {
          if (range.indexOf(',') != -1 && (nodeRef == null || propertyQName == null)) {
            if (logger.isInfoEnabled()) logger.info("Multi-range only supported for nodeRefs");
          } else {
            HttpRangeProcessor rangeProcessor = new HttpRangeProcessor(contentService);
            processedRange =
                rangeProcessor.processRange(
                    res,
                    reader,
                    range.substring(6),
                    nodeRef,
                    propertyQName,
                    mimetype,
                    req.getHeader(HEADER_USER_AGENT));
          }
        }
      }
      if (!processedRange) {
        if (logger.isDebugEnabled()) logger.debug("Sending complete file content...");

        // set mimetype for the content and the character encoding for the stream
        res.setContentType(mimetype);
        res.setContentEncoding(encoding);

        // return the complete entity range
        res.setHeader(
            HEADER_CONTENT_RANGE,
            "bytes 0-" + Long.toString(size - 1L) + "/" + Long.toString(size));
        res.setHeader(HEADER_CONTENT_LENGTH, Long.toString(size));

        // set caching
        setResponseCache(res, modified, eTag, model);

        // get the content and stream directly to the response output stream
        // assuming the repository is capable of streaming in chunks, this should allow large files
        // to be streamed directly to the browser response stream.
        reader.getContent(res.getOutputStream());
      }
    } catch (SocketException e1) {
      // the client cut the connection - our mission was accomplished apart from a little error
      // message
      if (logger.isInfoEnabled()) logger.info("Client aborted stream read:\n\tcontent: " + reader);
    } catch (ContentIOException e2) {
      if (logger.isInfoEnabled()) logger.info("Client aborted stream read:\n\tcontent: " + reader);
    }
  }
  /**
   * Sign the files referenced by the supplied signing request.
   *
   * @param signingDTO signing information (key file, key password, files to sign and signature
   *     options)
   */
  public void sign(final DigitalSigningDTO signingDTO) {
    if (signingDTO != null) {

      try {
        Security.addProvider(new BouncyCastleProvider());
        final File alfTempDir = TempFileProvider.getTempDir();

        if (alfTempDir != null) {
          final String keyType =
              (String) nodeService.getProperty(signingDTO.getKeyFile(), SigningModel.PROP_KEYTYPE);

          if (SigningConstants.KEY_TYPE_X509.equals(keyType)) {
            // Sign the file
            final KeyStore ks = KeyStore.getInstance("pkcs12");
            final ContentReader keyContentReader = getReader(signingDTO.getKeyFile());

            if (keyContentReader != null && ks != null && signingDTO.getKeyPassword() != null) {

              final List<AlfrescoRuntimeException> errors =
                  new ArrayList<AlfrescoRuntimeException>();

              // Get crypted secret key and decrypt it
              final Serializable encryptedPropertyValue =
                  nodeService.getProperty(
                      signingDTO.getKeyFile(), SigningModel.PROP_KEYCRYPTSECRET);
              final Serializable decryptedPropertyValue =
                  metadataEncryptor.decrypt(
                      SigningModel.PROP_KEYCRYPTSECRET, encryptedPropertyValue);

              // Decrypt key content
              InputStream decryptedKeyContent;
              try {
                decryptedKeyContent =
                    CryptUtils.decrypt(
                        decryptedPropertyValue.toString(),
                        keyContentReader.getContentInputStream());
              } catch (Throwable e) {
                log.error(e);
                throw new AlfrescoRuntimeException(e.getMessage(), e);
              }

              ks.load(
                  new ByteArrayInputStream(IOUtils.toByteArray(decryptedKeyContent)),
                  signingDTO.getKeyPassword().toCharArray());

              final String alias =
                  (String)
                      nodeService.getProperty(signingDTO.getKeyFile(), SigningModel.PROP_KEYALIAS);

              final PrivateKey key =
                  (PrivateKey) ks.getKey(alias, signingDTO.getKeyPassword().toCharArray());
              final Certificate[] chain = ks.getCertificateChain(alias);

              final Iterator<NodeRef> itFilesToSign = signingDTO.getFilesToSign().iterator();
              while (itFilesToSign.hasNext()) {
                final NodeRef nodeRefToSign = itFilesToSign.next();
                final AlfrescoRuntimeException exception =
                    signFile(nodeRefToSign, signingDTO, alfTempDir, alias, ks, key, chain);
                if (exception != null) {
                  // Error on the file process
                  errors.add(exception);
                }
              }

              if (errors != null && errors.size() > 0) {
                final StringBuffer allErrors = new StringBuffer();
                final Iterator<AlfrescoRuntimeException> itErrors = errors.iterator();
                if (errors.size() > 1) {
                  allErrors.append("\n");
                }
                while (itErrors.hasNext()) {
                  final AlfrescoRuntimeException alfrescoRuntimeException = itErrors.next();
                  allErrors.append(alfrescoRuntimeException.getMessage());
                  if (itErrors.hasNext()) {
                    allErrors.append("\n");
                  }
                }
                throw new RuntimeException(allErrors.toString());
              }

            } else {
              log.error("Unable to get key content, key type or key password.");
              throw new AlfrescoRuntimeException(
                  "Unable to get key content, key type or key password.");
            }
          }
        } else {
          log.error("Unable to get temporary directory.");
          throw new AlfrescoRuntimeException("Unable to get temporary directory.");
        }
      } catch (KeyStoreException e) {
        log.error(e);
        throw new AlfrescoRuntimeException(e.getMessage(), e);
      } catch (NoSuchAlgorithmException e) {
        log.error(e);
        throw new AlfrescoRuntimeException(e.getMessage(), e);
      } catch (CertificateException e) {
        log.error(e);
        throw new AlfrescoRuntimeException(e.getMessage(), e);
      } catch (IOException e) {
        log.error(e);
        throw new AlfrescoRuntimeException(e.getMessage(), e);
      } catch (UnrecoverableKeyException e) {
        log.error(e);
        throw new AlfrescoRuntimeException(e.getMessage(), e);
      }
    } else {
      log.error("No object with signing informations.");
      throw new AlfrescoRuntimeException("No object with signing informations.");
    }
  }
  public void testEagerCleanupOnCommit() throws Exception {
    eagerCleaner.setEagerOrphanCleanup(true);
    // Create a new file
    RetryingTransactionCallback<NodeRef> makeContentCallback =
        new RetryingTransactionCallback<NodeRef>() {
          public NodeRef execute() throws Throwable {
            // Create some content
            StoreRef storeRef =
                nodeService.createStore("test", "testEagerCleanupOnCommit-" + GUID.generate());
            NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
            Map<QName, Serializable> properties =
                Collections.singletonMap(ContentModel.PROP_NAME, (Serializable) "test.txt");
            NodeRef contentNodeRef =
                nodeService
                    .createNode(
                        rootNodeRef,
                        ContentModel.ASSOC_CHILDREN,
                        ContentModel.ASSOC_CHILDREN,
                        ContentModel.TYPE_CONTENT,
                        properties)
                    .getChildRef();
            ContentWriter writer =
                contentService.getWriter(contentNodeRef, ContentModel.PROP_CONTENT, true);
            writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
            writer.putContent("INITIAL CONTENT");
            // Done
            return contentNodeRef;
          }
        };
    final NodeRef contentNodeRef =
        transactionService.getRetryingTransactionHelper().doInTransaction(makeContentCallback);
    ContentReader contentReader =
        contentService.getReader(contentNodeRef, ContentModel.PROP_CONTENT);
    assertTrue("Expect content to exist", contentReader.exists());

    // Now update the node, but force a failure i.e. txn rollback
    final List<String> newContentUrls = new ArrayList<String>();
    RetryingTransactionCallback<String> failUpdateCallback =
        new RetryingTransactionCallback<String>() {
          public String execute() throws Throwable {
            ContentWriter writer =
                contentService.getWriter(contentNodeRef, ContentModel.PROP_CONTENT, true);
            writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
            writer.putContent("CONTENT FOR FAIL");
            // This will have updated the metadata, so we can fail now
            newContentUrls.add(writer.getContentUrl());
            // Done
            throw new RuntimeException("FAIL");
          }
        };
    try {
      transactionService.getRetryingTransactionHelper().doInTransaction(failUpdateCallback);
      fail("Transaction was meant to fail");
    } catch (RuntimeException e) {
      if (e.getMessage().equals("FAIL")) {
        // Expected
      } else {
        // Ooops
        throw e;
      }
    }
    // Make sure that the new content is not there
    // The original content must still be there
    assertEquals("Expected one content URL to play with", 1, newContentUrls.size());
    ContentReader readerMissing = contentService.getRawReader(newContentUrls.get(0));
    assertFalse("Newly created content should have been removed.", readerMissing.exists());
    assertTrue("Original content should still be there.", contentReader.exists());

    // Now update the node successfully
    RetryingTransactionCallback<String> successUpdateCallback =
        new RetryingTransactionCallback<String>() {
          public String execute() throws Throwable {
            ContentWriter writer =
                contentService.getWriter(contentNodeRef, ContentModel.PROP_CONTENT, true);
            writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
            writer.putContent("CONTENT FOR SUCCESS");
            // Done
            return writer.getContentUrl();
          }
        };
    String newContentUrl =
        transactionService.getRetryingTransactionHelper().doInTransaction(successUpdateCallback);
    // Make sure that the new content is there
    // The original content was disposed of
    ContentReader contentReaderNew = contentService.getRawReader(newContentUrl);
    assertTrue("Newly created content should be present.", contentReaderNew.exists());
    assertFalse("Original content should have been removed.", contentReader.exists());

    // Now delete the node
    RetryingTransactionCallback<Object> deleteNodeCallback =
        new RetryingTransactionCallback<Object>() {
          public Object execute() throws Throwable {
            nodeService.deleteNode(contentNodeRef);
            // Done
            return null;
          }
        };
    transactionService.getRetryingTransactionHelper().doInTransaction(deleteNodeCallback);
    // The new content must have disappeared
    assertFalse("Newly created content should be removed.", contentReaderNew.exists());
  }
  private AlfrescoRuntimeException signFile(
      final NodeRef nodeRefToSign,
      final DigitalSigningDTO signingDTO,
      final File alfTempDir,
      final String alias,
      final KeyStore ks,
      final PrivateKey key,
      final Certificate[] chain) {
    final String fileNameToSign = fileFolderService.getFileInfo(nodeRefToSign).getName();

    File fileConverted = null;
    File tempDir = null;
    try {
      ContentReader fileToSignContentReader = getReader(nodeRefToSign);

      if (fileToSignContentReader != null) {
        String newName = null;

        // Check if document is PDF or transform it
        if (!MimetypeMap.MIMETYPE_PDF.equals(fileToSignContentReader.getMimetype())) {
          // Transform document in PDF document
          final ContentTransformer transformer =
              contentTransformerRegistry.getTransformer(
                  fileToSignContentReader.getMimetype(),
                  fileToSignContentReader.getSize(),
                  MimetypeMap.MIMETYPE_PDF,
                  new TransformationOptions());

          if (transformer != null) {

            tempDir = new File(alfTempDir.getPath() + File.separatorChar + nodeRefToSign.getId());
            if (tempDir != null) {
              tempDir.mkdir();
              fileConverted =
                  new File(tempDir, fileNameToSign + "_" + System.currentTimeMillis() + ".pdf");
              if (fileConverted != null) {
                final ContentWriter newDoc = new FileContentWriter(fileConverted);
                if (newDoc != null) {
                  newDoc.setMimetype(MimetypeMap.MIMETYPE_PDF);
                  transformer.transform(fileToSignContentReader, newDoc);
                  fileToSignContentReader = new FileContentReader(fileConverted);

                  final String originalName =
                      (String) nodeService.getProperty(nodeRefToSign, ContentModel.PROP_NAME);

                  newName = originalName.substring(0, originalName.lastIndexOf(".")) + ".pdf";
                }
              }
            }
          } else {
            log.error(
                "["
                    + fileNameToSign
                    + "] No suitable converter found to convert the document to PDF.");
            return new AlfrescoRuntimeException(
                "["
                    + fileNameToSign
                    + "] No suitable converter found to convert the document to PDF.");
          }
        }

        // Convert the PDF to PDF/A format
        final File pdfAFile = convertPdfToPdfA(fileToSignContentReader.getContentInputStream());

        final PdfReader reader = new PdfReader(new FileInputStream(pdfAFile));

        if (nodeRefToSign != null) {
          tempDir = new File(alfTempDir.getPath() + File.separatorChar + nodeRefToSign.getId());
          if (tempDir != null) {
            tempDir.mkdir();
            final File file = new File(tempDir, fileNameToSign);

            if (file != null) {
              final FileOutputStream fout = new FileOutputStream(file);
              final PdfStamper stp = PdfStamper.createSignature(reader, fout, '\0');

              if (stp != null) {
                final PdfSignatureAppearance sap = stp.getSignatureAppearance();
                if (sap != null) {
                  sap.setCrypto(key, chain, null, PdfSignatureAppearance.WINCER_SIGNED);
                  sap.setReason(signingDTO.getSignReason());
                  sap.setLocation(signingDTO.getSignLocation());
                  sap.setContact(signingDTO.getSignContact());
                  sap.setCertificationLevel(PdfSignatureAppearance.CERTIFIED_NO_CHANGES_ALLOWED);
                  sap.setImageScale(1);

                  // digital signature
                  if (signingDTO.getSigningField() != null
                      && !signingDTO.getSigningField().trim().equalsIgnoreCase("")) {
                    Image img = null;
                    if (signingDTO.getImage() != null) {
                      final ContentReader imageContentReader = getReader(signingDTO.getImage());
                      final AcroFields af = reader.getAcroFields();
                      if (af != null) {
                        final List<FieldPosition> positions =
                            af.getFieldPositions(signingDTO.getSigningField());
                        if (positions != null
                            && positions.size() > 0
                            && positions.get(0) != null
                            && positions.get(0).position != null) {
                          final BufferedImage newImg =
                              scaleImage(
                                  ImageIO.read(imageContentReader.getContentInputStream()),
                                  BufferedImage.TYPE_INT_RGB,
                                  Float.valueOf(positions.get(0).position.getWidth()).intValue(),
                                  Float.valueOf(positions.get(0).position.getHeight()).intValue());
                          img = Image.getInstance(newImg, null);
                        } else {
                          log.error(
                              "["
                                  + fileNameToSign
                                  + "] The field '"
                                  + signingDTO.getSigningField()
                                  + "' doesn't exist in the document.");
                          return new AlfrescoRuntimeException(
                              "["
                                  + fileNameToSign
                                  + "] The field '"
                                  + signingDTO.getSigningField()
                                  + "' doesn't exist in the document.");
                        }
                      }
                      if (img == null) {
                        img =
                            Image.getInstance(
                                ImageIO.read(imageContentReader.getContentInputStream()), null);
                      }
                      sap.setImage(img);
                    }
                    sap.setVisibleSignature(signingDTO.getSigningField());
                  } else {
                    int pageToSign = 1;
                    if (DigitalSigningDTO.PAGE_LAST.equalsIgnoreCase(
                        signingDTO.getPages().trim())) {
                      pageToSign = reader.getNumberOfPages();
                    } else if (DigitalSigningDTO.PAGE_SPECIFIC.equalsIgnoreCase(
                        signingDTO.getPages().trim())) {
                      if (signingDTO.getPageNumber() > 0
                          && signingDTO.getPageNumber() <= reader.getNumberOfPages()) {
                        pageToSign = signingDTO.getPageNumber();
                      } else {
                        throw new AlfrescoRuntimeException("Page number is out of bound.");
                      }
                    }
                    if (signingDTO.getImage() != null) {
                      final ContentReader imageContentReader = getReader(signingDTO.getImage());
                      // Resize image
                      final BufferedImage newImg =
                          scaleImage(
                              ImageIO.read(imageContentReader.getContentInputStream()),
                              BufferedImage.TYPE_INT_RGB,
                              signingDTO.getSignWidth(),
                              signingDTO.getSignHeight());
                      final Image img = Image.getInstance(newImg, null);
                      sap.setImage(img);
                    }
                    if (signingDTO.getPosition() != null
                        && !DigitalSigningDTO.POSITION_CUSTOM.equalsIgnoreCase(
                            signingDTO.getPosition().trim())) {
                      final Rectangle pageRect = reader.getPageSizeWithRotation(1);
                      sap.setVisibleSignature(
                          positionSignature(
                              signingDTO.getPosition(),
                              pageRect,
                              signingDTO.getSignWidth(),
                              signingDTO.getSignHeight(),
                              signingDTO.getxMargin(),
                              signingDTO.getyMargin()),
                          pageToSign,
                          null);
                    } else {
                      sap.setVisibleSignature(
                          new Rectangle(
                              signingDTO.getLocationX(),
                              signingDTO.getLocationY(),
                              signingDTO.getLocationX() + signingDTO.getSignWidth(),
                              signingDTO.getLocationY() - signingDTO.getSignHeight()),
                          pageToSign,
                          null);
                    }
                  }
                  stp.close();

                  NodeRef destinationNode = null;
                  NodeRef originalDoc = null;
                  boolean addAsNewVersion = false;
                  if (signingDTO.getDestinationFolder() == null) {
                    destinationNode = nodeRefToSign;
                    nodeService.addAspect(destinationNode, ContentModel.ASPECT_VERSIONABLE, null);
                    addAsNewVersion = true;
                  } else {
                    originalDoc = nodeRefToSign;
                    destinationNode =
                        createDestinationNode(
                            file.getName(), signingDTO.getDestinationFolder(), nodeRefToSign);
                  }

                  if (destinationNode != null) {

                    final ContentWriter writer =
                        contentService.getWriter(destinationNode, ContentModel.PROP_CONTENT, true);
                    if (writer != null) {
                      writer.setEncoding(fileToSignContentReader.getEncoding());
                      writer.setMimetype("application/pdf");
                      writer.putContent(file);
                      file.delete();

                      if (fileConverted != null) {
                        fileConverted.delete();
                      }

                      nodeService.addAspect(
                          destinationNode,
                          SigningModel.ASPECT_SIGNED,
                          new HashMap<QName, Serializable>());
                      nodeService.setProperty(
                          destinationNode, SigningModel.PROP_REASON, signingDTO.getSignReason());
                      nodeService.setProperty(
                          destinationNode,
                          SigningModel.PROP_LOCATION,
                          signingDTO.getSignLocation());
                      nodeService.setProperty(
                          destinationNode, SigningModel.PROP_SIGNATUREDATE, new java.util.Date());
                      nodeService.setProperty(
                          destinationNode,
                          SigningModel.PROP_SIGNEDBY,
                          AuthenticationUtil.getRunAsUser());

                      if (newName != null) {
                        nodeService.setProperty(destinationNode, ContentModel.PROP_NAME, newName);
                      }

                      final X509Certificate c = (X509Certificate) ks.getCertificate(alias);
                      nodeService.setProperty(
                          destinationNode, SigningModel.PROP_VALIDITY, c.getNotAfter());
                      nodeService.setProperty(
                          destinationNode, SigningModel.PROP_ORIGINAL_DOC, originalDoc);

                      if (!addAsNewVersion) {
                        if (!nodeService.hasAspect(originalDoc, SigningModel.ASPECT_ORIGINAL_DOC)) {
                          nodeService.addAspect(
                              originalDoc,
                              SigningModel.ASPECT_ORIGINAL_DOC,
                              new HashMap<QName, Serializable>());
                        }
                        nodeService.createAssociation(
                            originalDoc, destinationNode, SigningModel.PROP_RELATED_DOC);
                      }
                    }
                  } else {
                    log.error("[" + fileNameToSign + "] Destination node is not a valid NodeRef.");
                    return new AlfrescoRuntimeException(
                        "[" + fileNameToSign + "] Destination node is not a valid NodeRef.");
                  }
                } else {
                  log.error("[" + fileNameToSign + "] Unable to get PDF appearance signature.");
                  return new AlfrescoRuntimeException(
                      "[" + fileNameToSign + "] Unable to get PDF appearance signature.");
                }
              } else {
                log.error("[" + fileNameToSign + "] Unable to create PDF signature.");
                return new AlfrescoRuntimeException(
                    "[" + fileNameToSign + "] Unable to create PDF signature.");
              }
            }
          }
        } else {
          log.error("[" + fileNameToSign + "] Unable to get document to sign content.");
          return new AlfrescoRuntimeException(
              "[" + fileNameToSign + "] Unable to get document to sign content.");
        }

        if (pdfAFile != null) {
          pdfAFile.delete();
        }

        return null;

      } else {
        log.error("[" + fileNameToSign + "] The document has no content.");
        return new AlfrescoRuntimeException(
            "[" + fileNameToSign + "] The document has no content.");
      }
    } catch (KeyStoreException e) {
      log.error("[" + fileNameToSign + "] " + e);
      return new AlfrescoRuntimeException("[" + fileNameToSign + "] " + e.getMessage(), e);
    } catch (ContentIOException e) {
      log.error("[" + fileNameToSign + "] " + e);
      return new AlfrescoRuntimeException("[" + fileNameToSign + "] " + e.getMessage(), e);
    } catch (IOException e) {
      log.error("[" + fileNameToSign + "] " + e);
      return new AlfrescoRuntimeException("[" + fileNameToSign + "] " + e.getMessage(), e);
    } catch (DocumentException e) {
      log.error("[" + fileNameToSign + "] " + e);
      return new AlfrescoRuntimeException("[" + fileNameToSign + "] " + e.getMessage(), e);
    } finally {
      if (tempDir != null) {
        try {
          tempDir.delete();
        } catch (Exception ex) {
          log.error("[" + fileNameToSign + "] " + ex);
          return new AlfrescoRuntimeException("[" + fileNameToSign + "] " + ex.getMessage(), ex);
        }
      }
    }
  }
  /**
   * @see
   *     org.alfresco.repo.content.transform.AbstractContentTransformer2#transformInternal(org.alfresco.service.cmr.repository.ContentReader,
   *     org.alfresco.service.cmr.repository.ContentWriter,
   *     org.alfresco.service.cmr.repository.TransformationOptions)
   */
  @Override
  public void transformInternal(
      ContentReader reader, ContentWriter writer, TransformationOptions options) throws Exception {
    final String outputMimetype = writer.getMimetype();
    final String outputFileExt = getMimetypeService().getExtension(outputMimetype);

    // We need to keep a reference to thrown exceptions as we're going to catch them and
    // then move on to the next transformer. In the event that they all fail, we will throw
    // the first exception that occurred.
    Exception transformationException = null;

    for (int i = 0; i < transformers.size(); i++) {
      int oneBasedCount = i + 1;
      ContentTransformer transf = transformers.get(i);
      ContentWriter currentWriter = null;
      File tempFile = null;
      try {
        if (logger.isDebugEnabled()) {
          logger.debug(
              "Transformation attempt "
                  + oneBasedCount
                  + " of "
                  + transformers.size()
                  + ": "
                  + transf);
        }

        if (!transf.isTransformable(
            reader.getMimetype(), reader.getSize(), outputMimetype, options)) {
          throw new UnsupportedTransformationException(
              "Unsupported transformation: " + reader.getMimetype() + " to " + outputMimetype);
        }

        // We can't know in advance which transformer in the sequence will work - if any.
        // Therefore we can't write into the ContentWriter stream.
        // So make a temporary file writer with the current transformer name.
        tempFile =
            TempFileProvider.createTempFile(
                "FailoverTransformer_intermediate_" + transf.getClass().getSimpleName() + "_",
                "." + outputFileExt);
        currentWriter = new FileContentWriter(tempFile);
        currentWriter.setMimetype(outputMimetype);
        currentWriter.setEncoding(writer.getEncoding());

        // attempt to transform
        transf.transform(reader, currentWriter, options);

        // TODO Could add a check for zero-length output and treat that as a failure
        // final long writtenSize = currentWriter.getSize();
      } catch (Exception are) {
        if (transformationException == null) {
          transformationException = are;
        }

        if (logger.isDebugEnabled()) {
          logger.debug("Transformation " + oneBasedCount + " was unsuccessful.");
          if (i != transformers.size() - 1) {
            // We don't log the last exception as we're going to throw it.
            logger.debug("The below exception is provided for information purposes only.", are);
          }
        }

        // Set a new reader to refresh the input stream.
        reader = reader.getReader();
        // and move to the next transformer
        continue;
      }
      // No need to close input or output streams

      // At this point the current transformation was successful i.e. it did not throw an exception.

      // Now we must copy the content from the temporary file into the ContentWriter stream.
      if (tempFile != null) {
        writer.putContent(tempFile);
      }

      if (logger.isInfoEnabled()) {
        logger.info("Transformation was successful");
      }
      return;
    }
    // At this point we have tried all transformers in the sequence without apparent success.
    if (transformationException != null) {
      transformerDebug.debug("          No more transformations to failover to");
      if (logger.isDebugEnabled()) {
        logger.debug(
            "All transformations were unsuccessful. Throwing first exception.",
            transformationException);
      }
      throw transformationException;
    }
  }
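The TODO inside the try block above hints at treating zero-length output as a failure, so the loop fails over to the next transformer instead of returning empty content. A minimal sketch of such a check as a hypothetical helper (the commented-out currentWriter.getSize() line suggests an equivalent writer-side API, but java.io.File.length() is used here to keep the sketch self-contained):

  // Would be called right after transf.transform(reader, currentWriter, options), inside the try block.
  private static void checkNonEmptyOutput(File tempFile, ContentTransformer transf) {
    if (tempFile == null || tempFile.length() == 0L) {
      // Throwing here lands in the existing catch block, which records the failure and fails over.
      throw new AlfrescoRuntimeException(
          "Transformer " + transf.getClass().getSimpleName() + " produced zero-length output");
    }
  }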
  /** Test checkIn */
  public void testCheckIn() {
    NodeRef workingCopy = checkout();

    // Test standard check-in
    Map<String, Serializable> versionProperties = new HashMap<String, Serializable>();
    versionProperties.put(Version.PROP_DESCRIPTION, "This is a test version");
    cociService.checkin(workingCopy, versionProperties);

    // Test check-in with content
    NodeRef workingCopy3 = checkout();

    nodeService.setProperty(workingCopy3, PROP_NAME_QNAME, TEST_VALUE_2);
    nodeService.setProperty(workingCopy3, PROP2_QNAME, TEST_VALUE_3);
    ContentWriter tempWriter =
        this.contentService.getWriter(workingCopy3, ContentModel.PROP_CONTENT, false);
    assertNotNull(tempWriter);
    tempWriter.putContent(CONTENT_2);
    String contentUrl = tempWriter.getContentUrl();
    Map<String, Serializable> versionProperties3 = new HashMap<String, Serializable>();
    versionProperties3.put(Version.PROP_DESCRIPTION, "description");
    versionProperties3.put(VersionModel.PROP_VERSION_TYPE, VersionType.MAJOR);
    NodeRef origNodeRef = cociService.checkin(workingCopy3, versionProperties3, contentUrl, true);
    assertNotNull(origNodeRef);

    // Check the checked in content
    ContentReader contentReader =
        this.contentService.getReader(origNodeRef, ContentModel.PROP_CONTENT);
    assertNotNull(contentReader);
    assertEquals(CONTENT_2, contentReader.getContentString());

    // Check that the version history is correct
    Version version = this.versionService.getCurrentVersion(origNodeRef);
    assertNotNull(version);
    assertEquals("description", version.getDescription());
    assertEquals(VersionType.MAJOR, version.getVersionType());
    NodeRef versionNodeRef = version.getFrozenStateNodeRef();
    assertNotNull(versionNodeRef);

    // Check the versioned content
    ContentReader versionContentReader =
        this.contentService.getReader(versionNodeRef, ContentModel.PROP_CONTENT);
    assertNotNull(versionContentReader);
    assertEquals(CONTENT_2, versionContentReader.getContentString());

    // Check that the name is not updated during the check-in
    assertEquals(TEST_VALUE_2, nodeService.getProperty(versionNodeRef, PROP_NAME_QNAME));
    assertEquals(TEST_VALUE_2, nodeService.getProperty(origNodeRef, PROP_NAME_QNAME));

    // Check that the other properties are updated during the check-in
    assertEquals(TEST_VALUE_3, nodeService.getProperty(versionNodeRef, PROP2_QNAME));
    assertEquals(TEST_VALUE_3, nodeService.getProperty(origNodeRef, PROP2_QNAME));

    // Cancel the checkout after it has been left checked out
    cociService.cancelCheckout(workingCopy3);

    // Test keep checked out flag
    NodeRef workingCopy2 = checkout();
    Map<String, Serializable> versionProperties2 = new HashMap<String, Serializable>();
    versionProperties2.put(Version.PROP_DESCRIPTION, "Another version test");
    this.cociService.checkin(workingCopy2, versionProperties2, null, true);
    this.cociService.checkin(workingCopy2, new HashMap<String, Serializable>(), null, true);
  }
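The test above relies on a checkout() helper that is not shown in this snippet. A plausible minimal sketch (the nodeRef field is an assumption; it would be a content node created in the test's setUp()):

  private NodeRef checkout() {
    // Check out the test node and hand back the working copy for the individual test to use.
    NodeRef workingCopy = this.cociService.checkout(this.nodeRef);
    assertNotNull(workingCopy);
    return workingCopy;
  }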
  /** Upload an encoded Base64 file to the repository and decode it back once it's uploaded. */
  @Override
  protected Map<String, Object> executeImpl(WebScriptRequest req, Status status, Cache cache) {
    HashMap<String, Object> model = new HashMap<String, Object>();
    UserTransaction trx = serviceRegistry.getTransactionService().getUserTransaction();

    try {
      trx.begin();
      System.out.println(trx.hashCode());
      Element args = Arguments.getArguments(req);
      String username =
          args.getElementsByTagName(FORM_PARAM_USERNAME).item(0).getFirstChild().getNodeValue();

      model.put(FTL_USERNAME, username);
      Impersonate.impersonate(username);

      String ref = DocumentUtils.pujarDocumentBase64(req, args, username).trim();
      NodeRef nodeRef = new NodeRef(ref);

      Map<QName, Serializable> props = serviceRegistry.getNodeService().getProperties(nodeRef);
      ContentReader reader =
          serviceRegistry.getContentService().getReader(nodeRef, ContentModel.PROP_CONTENT);
      byte[] contentDecoded =
          es.mityc.firmaJava.libreria.utilidades.Base64.decode(reader.getContentString());
      ContentWriter writer =
          serviceRegistry.getContentService().getWriter(nodeRef, ContentModel.PROP_CONTENT, true);
      writer.putContent(new ByteArrayInputStream(contentDecoded));

      serviceRegistry.getOwnableService().setOwner(nodeRef, username);

      Context cx = Context.enter();
      Scriptable scope = cx.initStandardObjects();
      ScriptNode document = new ScriptNode(nodeRef, serviceRegistry, scope);

      model.put(FTL_DOCUMENT, document);
      model.put(FTL_SUCCESS, String.valueOf(true));

      // Audit document creation
      String type = document.getTypeShort();
      String site = document.getSiteShortName();
      if (type.equals("udl:documentSimple")) {
        AuditUdl.auditRecord(
            auditComponent,
            username,
            document.getNodeRef().toString(),
            AUDIT_ACTION_CREATE_DOCUMENT_SIMPLE,
            type,
            site);
        QName qNameIdDocSimple =
            QName.createQName(
                "http://www.smile.com/model/udl/1.0", "secuencial_identificador_documentSimple");
        String idDocSimple =
            (String) serviceRegistry.getNodeService().getProperty(nodeRef, qNameIdDocSimple);

        if ("".equals(idDocSimple) || idDocSimple == null) {
          // serviceRegistry.getNodeService().deleteNode(nodeRef);
          throw new Exception("Error obtenint identificador via WebService.");
        }
      }

      trx.commit();

    } catch (Exception e) {
      e.printStackTrace();
      model.put(FTL_ERROR, e.getMessage());
      model.put(FTL_SUCCESS, String.valueOf(false));

      try {
        if (trx.getStatus() == javax.transaction.Status.STATUS_ACTIVE) {
          System.out.println(trx.hashCode());
          trx.rollback();
        }
      } catch (SystemException ex) {
        ex.printStackTrace();
      }
    }

    return model;
  }
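The web script above expects the uploaded content to be Base64 text, which it then decodes and writes back as binary. A client-side sketch of the encoding step (an assumption about the caller, using only the JDK):

  private static String encodeFileAsBase64(java.nio.file.Path file) throws java.io.IOException {
    // Read the raw bytes and encode them so they can be embedded in the request that
    // DocumentUtils.pujarDocumentBase64 later parses.
    byte[] raw = java.nio.file.Files.readAllBytes(file);
    return java.util.Base64.getEncoder().encodeToString(raw);
  }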
  /**
   * Streams the content on a given node's content property to the response of the web script.
   *
   * @param req Request
   * @param res Response
   * @param nodeRef The node reference
   * @param propertyQName The content property name
   * @param attach Indicates whether the content should be streamed as an attachment or not
   * @param attachFileName Optional file name to use when attach is <code>true</code>
   * @throws IOException
   */
  public void streamContent(
      WebScriptRequest req,
      WebScriptResponse res,
      NodeRef nodeRef,
      QName propertyQName,
      boolean attach,
      String attachFileName,
      Map<String, Object> model)
      throws IOException {
    if (logger.isDebugEnabled())
      logger.debug(
          "Retrieving content from node ref "
              + nodeRef.toString()
              + " (property: "
              + propertyQName.toString()
              + ") (attach: "
              + attach
              + ")");

    // TODO
    // This was commented out to accommodate records management permissions. We need to review how
    // we cope with this hard-coded permission check.

    // check that the user has at least READ_CONTENT access - else redirect to the login page
    //        if (permissionService.hasPermission(nodeRef, PermissionService.READ_CONTENT) ==
    // AccessStatus.DENIED)
    //        {
    //            throw new WebScriptException(HttpServletResponse.SC_FORBIDDEN, "Permission
    // denied");
    //        }

    // check If-Modified-Since header and set Last-Modified header as appropriate
    Date modified = (Date) nodeService.getProperty(nodeRef, ContentModel.PROP_MODIFIED);
    if (modified != null) {
      long modifiedSince = -1;
      String modifiedSinceStr = req.getHeader("If-Modified-Since");
      if (modifiedSinceStr != null) {
        try {
          modifiedSince = dateFormat.parse(modifiedSinceStr).getTime();
        } catch (Throwable e) {
          if (logger.isInfoEnabled())
            logger.info(
                "Browser sent badly-formatted If-Modified-Since header: " + modifiedSinceStr);
        }

        if (modifiedSince > 0L) {
          // round the date to ignore the millisecond value, which is not supplied by the header
          long modDate = (modified.getTime() / 1000L) * 1000L;
          if (modDate <= modifiedSince) {
            res.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
            return;
          }
        }
      }
    }

    // get the content reader
    ContentReader reader = contentService.getReader(nodeRef, propertyQName);
    if (reader == null || !reader.exists()) {
      throw new WebScriptException(
          HttpServletResponse.SC_NOT_FOUND,
          "Unable to locate content for node ref "
              + nodeRef
              + " (property: "
              + propertyQName.toString()
              + ")");
    }

    // Stream the content
    streamContentImpl(
        req,
        res,
        reader,
        nodeRef,
        propertyQName,
        attach,
        modified,
        modified == null ? null : String.valueOf(modified.getTime()),
        attachFileName,
        model);
  }
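The dateFormat field used for the If-Modified-Since header is not shown here. Assuming it parses RFC 1123 HTTP dates (e.g. "Tue, 15 Nov 1994 12:45:26 GMT"), a minimal sketch would be:

  // SimpleDateFormat is not thread-safe, so a shared instance like this would need external
  // synchronization (or a per-call instance) in a real web script.
  private static final java.text.SimpleDateFormat dateFormat =
      new java.text.SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", java.util.Locale.US);

  static {
    dateFormat.setTimeZone(java.util.TimeZone.getTimeZone("GMT"));
  }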
  public List<VerifyResultDTO> verifySign(final VerifyingDTO verifyingDTO) {
    final List<VerifyResultDTO> result = new ArrayList<VerifyResultDTO>();
    try {
      if (verifyingDTO != null) {
        final String keyType =
            (String) nodeService.getProperty(verifyingDTO.getKeyFile(), SigningModel.PROP_KEYTYPE);

        final KeyStore ks = KeyStore.getInstance(keyType);
        final ContentReader keyContentReader = getReader(verifyingDTO.getKeyFile());
        if (keyContentReader != null && ks != null && verifyingDTO.getKeyPassword() != null) {

          // Get crypted secret key and decrypt it
          final Serializable encryptedPropertyValue =
              nodeService.getProperty(verifyingDTO.getKeyFile(), SigningModel.PROP_KEYCRYPTSECRET);
          final Serializable decryptedPropertyValue =
              metadataEncryptor.decrypt(SigningModel.PROP_KEYCRYPTSECRET, encryptedPropertyValue);

          // Decrypt key content
          final InputStream decryptedKeyContent =
              CryptUtils.decrypt(
                  decryptedPropertyValue.toString(), keyContentReader.getContentInputStream());

          ks.load(
              new ByteArrayInputStream(IOUtils.toByteArray(decryptedKeyContent)),
              verifyingDTO.getKeyPassword().toCharArray());

          final ContentReader fileToVerifyContentReader = getReader(verifyingDTO.getFileToVerify());
          if (fileToVerifyContentReader != null) {
            final PdfReader reader =
                new PdfReader(fileToVerifyContentReader.getContentInputStream());
            if (reader != null) {
              final AcroFields af = reader.getAcroFields();
              if (af != null) {
                final ArrayList<String> names = af.getSignatureNames();
                if (names != null) {
                  for (int k = 0; k < names.size(); ++k) {
                    final VerifyResultDTO verifyResultDTO = new VerifyResultDTO();
                    final String name = (String) names.get(k);
                    verifyResultDTO.setName(name);
                    verifyResultDTO.setSignatureCoversWholeDocument(
                        af.signatureCoversWholeDocument(name));
                    verifyResultDTO.setRevision(af.getRevision(name));
                    verifyResultDTO.setTotalRevision(af.getTotalRevisions());

                    final PdfPKCS7 pk = af.verifySignature(name);
                    if (pk != null) {
                      final Calendar cal = pk.getSignDate();
                      final Certificate[] pkc = pk.getCertificates();
                      Object[] fails = PdfPKCS7.verifyCertificates(pkc, ks, null, cal);
                      if (fails == null) {
                        verifyResultDTO.setIsSignValid(true);
                      } else {
                        verifyResultDTO.setIsSignValid(false);
                        verifyResultDTO.setFailReason(fails[1]);
                      }
                      verifyResultDTO.setSignSubject(
                          PdfPKCS7.getSubjectFields(pk.getSigningCertificate()).toString());
                      verifyResultDTO.setIsDocumentModified(!pk.verify());
                      verifyResultDTO.setSignDate(pk.getSignDate());
                      verifyResultDTO.setSignLocation(pk.getLocation());
                      verifyResultDTO.setSignInformationVersion(pk.getSigningInfoVersion());
                      verifyResultDTO.setSignReason(pk.getReason());
                      verifyResultDTO.setSignVersion(pk.getVersion());
                      verifyResultDTO.setSignName(pk.getSignName());

                      result.add(verifyResultDTO);
                    } else {
                      log.error("Unable to verify signature.");
                      throw new AlfrescoRuntimeException("Unable to verify signature.");
                    }
                  }
                } else {
                  log.error("Unable to get signature names.");
                  throw new AlfrescoRuntimeException("Unable to get signature names.");
                }
              } else {
                log.error("Unable to get PDF fields.");
                throw new AlfrescoRuntimeException("Unable to get PDF fields.");
              }
            }
          } else {
            log.error("Unable to get document to verify content.");
            throw new AlfrescoRuntimeException("Unable to get document to verify content.");
          }
        } else {
          log.error("Unable to get key content, key type or key password.");
          throw new AlfrescoRuntimeException(
              "Unable to get key content, key type or key password.");
        }
      } else {
        log.error("No object with verification informations.");
        throw new AlfrescoRuntimeException("No object with verification informations.");
      }
    } catch (KeyStoreException e) {
      log.error(e);
      throw new AlfrescoRuntimeException(e.getMessage(), e);
    } catch (ContentIOException e) {
      log.error(e);
      throw new AlfrescoRuntimeException(e.getMessage(), e);
    } catch (NoSuchAlgorithmException e) {
      log.error(e);
      throw new AlfrescoRuntimeException(e.getMessage(), e);
    } catch (CertificateException e) {
      log.error(e);
      throw new AlfrescoRuntimeException(e.getMessage(), e);
    } catch (IOException e) {
      log.error(e);
      throw new AlfrescoRuntimeException(e.getMessage(), e);
    } catch (GeneralSecurityException e) {
      log.error(e);
      throw new AlfrescoRuntimeException(e.getMessage(), e);
    } catch (Throwable e) {
      log.error(e);
      throw new AlfrescoRuntimeException(e.getMessage(), e);
    }

    return result;
  }
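The getReader(NodeRef) helper called above is not included in this snippet. A minimal sketch of what it likely does (the contentService field is an assumption):

  private ContentReader getReader(NodeRef nodeRef) {
    // Fetch a reader for the node's cm:content property; return null if there is nothing to read.
    ContentReader reader = contentService.getReader(nodeRef, ContentModel.PROP_CONTENT);
    return (reader != null && reader.exists()) ? reader : null;
  }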
  /**
   * Send an email message
   *
   * @throws AlfrescoRuntimeException
   */
  @SuppressWarnings("unchecked")
  @Override
  protected void executeImpl(final Action ruleAction, final NodeRef actionedUponNodeRef) {
    try {
      MimeMessage message = javaMailSender.createMimeMessage();
      // use the true flag to indicate you need a multipart message
      MimeMessageHelper helper = new MimeMessageHelper(message, true);

      // set recipient
      String to = (String) ruleAction.getParameterValue(PARAM_TO);
      if (to != null && to.length() != 0) {
        helper.setTo(to);
      } else {
        // see if multiple recipients have been supplied - as a list of
        // authorities
        Serializable toManyMails = ruleAction.getParameterValue(PARAM_TO_MANY);
        List<String> recipients = new ArrayList<String>();
        if (toManyMails instanceof List) {
          for (String mailAdress : (List<String>) toManyMails) {
            if (validateAddress(mailAdress)) {
              recipients.add(mailAdress);
            }
          }
        } else if (toManyMails instanceof String) {
          if (validateAddress((String) toManyMails)) {
            recipients.add((String) toManyMails);
          }
        }
        if (!recipients.isEmpty()) {
          helper.setTo(recipients.toArray(new String[recipients.size()]));
        } else {
          // No recipients have been specified
          logger.error("No recipient has been specified for the mail action");
        }
      }

      // set subject line
      helper.setSubject((String) ruleAction.getParameterValue(PARAM_SUBJECT));

      // See if an email template has been specified
      String text = null;
      NodeRef templateRef = (NodeRef) ruleAction.getParameterValue(PARAM_TEMPLATE);
      if (templateRef != null) {
        // build the email template model
        Map<String, Object> model = createEmailTemplateModel(actionedUponNodeRef, ruleAction);

        // process the template against the model
        text = templateService.processTemplate("freemarker", templateRef.toString(), model);
      }

      // set the text body of the message
      if (text == null) {
        text = (String) ruleAction.getParameterValue(PARAM_TEXT);
      }
      // adding the boolean true to send as HTML
      helper.setText(text, true);
      FileFolderService fileFolderService = serviceRegistry.getFileFolderService();
      /* Add inline images.
       * "action.parameters.images" is a comma-delimited string mapping inline image ids to resources,
       * in the spirit of:
       *   message.setText("my text <img src='cid:myLogo'>", true);
       *   message.addInline("myLogo", new ClassPathResource("img/mylogo.gif"));
       * so the "images" param can look like this: headerLogo|images/headerLogoNodeRef,footerLogo|footerLogoNodeRef
       */
      String imageList = (String) ruleAction.getParameterValue(PARAM_IMAGES);
      System.out.println(imageList);
      String[] imageMap = imageList.split(","); // comma no spaces
      Map<String, String> images = new HashMap<String, String>();
      for (String image : imageMap) {
        System.out.println(image);
        String[] map = image.split("\\|");
        for (String key : map) {
          System.out.println(key);
        }

        System.out.println(map.length);

        images.put(map[0].trim(), map[1].trim());
        System.out.println(images.size());
        System.out.println("-" + map[0] + " " + map[1] + "-");
      }
      NodeRef imagesFolderNodeRef = (NodeRef) ruleAction.getParameterValue(PARAM_IMAGES_FOLDER);
      if (null != imagesFolderNodeRef) {
        ContentService contentService = serviceRegistry.getContentService();
        System.out.println("mapping");
        for (Map.Entry<String, String> entry : images.entrySet()) {
          System.out.println(
              entry.getKey()
                  + " "
                  + entry.getValue()
                  + " "
                  + ruleAction.getParameterValue(PARAM_IMAGES_FOLDER));
          NodeRef imageFile = fileFolderService.searchSimple(imagesFolderNodeRef, entry.getValue());
          if (null != imageFile) {
            ContentReader reader = contentService.getReader(imageFile, ContentModel.PROP_CONTENT);
            ByteArrayResource resource =
                new ByteArrayResource(IOUtils.toByteArray(reader.getContentInputStream()));
            helper.addInline(entry.getKey(), resource, reader.getMimetype());

          } else {
            logger.error("No image for " + entry.getKey());
          }
        }
      } else {
        logger.error("No images folder");
      }

      // set the from address
      NodeRef person = personService.getPerson(authService.getCurrentUserName());

      String fromActualUser = null;
      if (person != null) {
        fromActualUser = (String) nodeService.getProperty(person, ContentModel.PROP_EMAIL);
      }
      if (fromActualUser != null && fromActualUser.length() != 0) {
        helper.setFrom(fromActualUser);
      } else {
        String from = (String) ruleAction.getParameterValue(PARAM_FROM);
        if (from == null || from.length() == 0) {
          helper.setFrom(fromAddress);
        } else {
          helper.setFrom(from);
        }
      }
      NodeRef attachmentsFolder = (NodeRef) ruleAction.getParameterValue(PARAM_ATTCHMENTS_FOLDER);
      if (attachmentsFolder != null) {

        List<FileInfo> attachFiles = fileFolderService.listFiles(attachmentsFolder);
        if (attachFiles != null && attachFiles.size() > 0) {
          for (FileInfo attachFile : attachFiles) {
            ContentReader contentReader = fileFolderService.getReader(attachFile.getNodeRef());
            ByteArrayResource resource =
                new ByteArrayResource(IOUtils.toByteArray(contentReader.getContentInputStream()));
            helper.addAttachment(attachFile.getName(), resource, contentReader.getMimetype());
          }
        }
      }

      // Send the message unless we are in "testMode"
      javaMailSender.send(message);
    } catch (Exception e) {
      String toUser = (String) ruleAction.getParameterValue(PARAM_TO);
      if (toUser == null) {
        Object obj = ruleAction.getParameterValue(PARAM_TO_MANY);
        if (obj != null) {
          toUser = obj.toString();
        }
      }

      logger.error("Failed to send email to " + toUser, e);

      throw new AlfrescoRuntimeException("Failed to send email to:" + toUser, e);
    }
  }
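The validateAddress(String) helper referenced when collecting recipients is not shown. A lenient sketch using javax.mail (an assumption; the real implementation may differ):

  private boolean validateAddress(String address) {
    try {
      // InternetAddress performs RFC 822 syntax checks; validate() tightens them slightly.
      new javax.mail.internet.InternetAddress(address).validate();
      return true;
    } catch (javax.mail.internet.AddressException e) {
      logger.warn("Skipping invalid email address: " + address);
      return false;
    }
  }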
  /**
   * @see
   *     org.alfresco.repo.content.transform.ContentTransformer#transform(org.alfresco.service.cmr.repository.ContentReader,
   *     org.alfresco.service.cmr.repository.ContentWriter,
   *     org.alfresco.service.cmr.repository.TransformationOptions)
   */
  public final void transform(
      ContentReader reader, ContentWriter writer, TransformationOptions options)
      throws ContentIOException {
    try {
      depth.set(depth.get() + 1);

      // begin timing
      long before = System.currentTimeMillis();

      String sourceMimetype = reader.getMimetype();
      String targetMimetype = writer.getMimetype();

      // check options map
      if (options == null) {
        options = new TransformationOptions();
      }

      try {
        if (transformerDebug.isEnabled()) {
          transformerDebug.pushTransform(
              this,
              reader.getContentUrl(),
              sourceMimetype,
              targetMimetype,
              reader.getSize(),
              options);
        }

        // Check the transformability
        checkTransformable(reader, writer, options);

        // Pass on any limits to the reader
        setReaderLimits(reader, writer, options);

        // Transform
        // MNT-12238: CLONE - CLONE - Upload of PPTX causes very high memory usage leading to system
        // instability
        // Limit the transformation to a configured number of milliseconds to avoid very high RAM
        // consumption and OOM when transforming problematic documents
        TransformationOptionLimits limits =
            getLimits(reader.getMimetype(), writer.getMimetype(), options);

        long timeoutMs = (null == limits) ? -1 : limits.getTimeoutMs();
        if (!useTimeoutThread || (null == limits) || (-1 == timeoutMs)) {
          transformInternal(reader, writer, options);
        } else {
          Future<?> submittedTask = null;
          StreamAwareContentReaderProxy proxiedReader = new StreamAwareContentReaderProxy(reader);
          StreamAwareContentWriterProxy proxiedWriter = new StreamAwareContentWriterProxy(writer);

          try {
            submittedTask =
                getExecutorService()
                    .submit(new TransformInternalCallable(proxiedReader, proxiedWriter, options));
            submittedTask.get(timeoutMs + additionalThreadTimout, TimeUnit.MILLISECONDS);
          } catch (TimeoutException e) {
            releaseResources(submittedTask, proxiedReader, proxiedWriter);
            throw new TimeoutException("Transformation failed due to timeout limit");
          } catch (InterruptedException e) {
            releaseResources(submittedTask, proxiedReader, proxiedWriter);
            throw new InterruptedException(
                "Transformation failed, because the thread of the transformation was interrupted");
          } catch (ExecutionException e) {
            Throwable cause = e.getCause();
            if (cause instanceof TransformInternalCallableException) {
              cause = ((TransformInternalCallableException) cause).getCause();
            }

            throw cause;
          }
        }

        // record time
        long after = System.currentTimeMillis();
        recordTime(sourceMimetype, targetMimetype, after - before);
      } catch (ContentServiceTransientException cste) {
        // A transient failure has occurred within the content transformer.
        // This should not be interpreted as a failure and therefore we should not
        // update the transformer's average time.
        if (logger.isDebugEnabled()) {
          logger.debug(
              "Transformation has been transiently declined: \n"
                  + "   reader: "
                  + reader
                  + "\n"
                  + "   writer: "
                  + writer
                  + "\n"
                  + "   options: "
                  + options
                  + "\n"
                  + "   transformer: "
                  + this);
        }
        // The finally block below will still perform the tidy-up; otherwise we're done.
        // We rethrow the exception.
        throw cste;
      } catch (UnsupportedTransformationException e) {
        // Don't record an error or even the time, as this is normal in compound transformations.
        transformerDebug.debug("          Failed", e);
        throw e;
      } catch (Throwable e) {
        // Make sure that this failed transformation is recorded against the transformer in terms of time taken.
        // This will ensure that transformers that compete for the same transformation
        // will be prejudiced against transformers that tend to fail
        long after = System.currentTimeMillis();
        recordError(sourceMimetype, targetMimetype, after - before);

        // Ask Tika to detect the document type and report whether
        // the claimed mime type is plausible
        String differentType = getMimetypeService().getMimetypeIfNotMatches(reader.getReader());

        // Report the error
        if (differentType == null) {
          transformerDebug.debug("          Failed", e);
          throw new ContentIOException(
              "Content conversion failed: \n"
                  + "   reader: "
                  + reader
                  + "\n"
                  + "   writer: "
                  + writer
                  + "\n"
                  + "   options: "
                  + options.toString(false)
                  + "\n"
                  + "   limits: "
                  + getLimits(reader, writer, options),
              e);
        } else {
          transformerDebug.debug("          Failed: Mime type was '" + differentType + "'", e);

          if (this.retryTransformOnDifferentMimeType) {
            // MNT-11015 fix.
            // Set a new reader to refresh the input stream.
            reader = reader.getReader();
            // set the actual file MIME type detected by Tika for content reader
            reader.setMimetype(differentType);

            // Get the correct transformer for the detected MIME type and retry the
            // transformation with it
            ContentTransformer transformer =
                this.registry.getTransformer(
                    differentType, reader.getSize(), targetMimetype, options);
            if (null != transformer) {
              transformer.transform(reader, writer, options);
            } else {
              transformerDebug.debug("          Failed", e);
              throw new ContentIOException(
                  "Content conversion failed: \n"
                      + "   reader: "
                      + reader
                      + "\n"
                      + "   writer: "
                      + writer
                      + "\n"
                      + "   options: "
                      + options.toString(false)
                      + "\n"
                      + "   limits: "
                      + getLimits(reader, writer, options)
                      + "\n"
                      + "   claimed mime type: "
                      + reader.getMimetype()
                      + "\n"
                      + "   detected mime type: "
                      + differentType
                      + "\n"
                      + "   transformer not found"
                      + "\n",
                  e);
            }
          } else {
            throw new ContentIOException(
                "Content conversion failed: \n"
                    + "   reader: "
                    + reader
                    + "\n"
                    + "   writer: "
                    + writer
                    + "\n"
                    + "   options: "
                    + options.toString(false)
                    + "\n"
                    + "   limits: "
                    + getLimits(reader, writer, options)
                    + "\n"
                    + "   claimed mime type: "
                    + reader.getMimetype()
                    + "\n"
                    + "   detected mime type: "
                    + differentType,
                e);
          }
        }
      } finally {
        transformerDebug.popTransform();

        // check that the reader and writer are both closed
        if (reader.isChannelOpen()) {
          logger.error(
              "Content reader not closed by transformer: \n"
                  + "   reader: "
                  + reader
                  + "\n"
                  + "   transformer: "
                  + this);
        }
        if (writer.isChannelOpen()) {
          logger.error(
              "Content writer not closed by transformer: \n"
                  + "   writer: "
                  + writer
                  + "\n"
                  + "   transformer: "
                  + this);
        }
      }

      // done
      if (logger.isDebugEnabled()) {
        logger.debug(
            "Completed transformation: \n"
                + "   reader: "
                + reader
                + "\n"
                + "   writer: "
                + writer
                + "\n"
                + "   options: "
                + options
                + "\n"
                + "   transformer: "
                + this);
      }
    } finally {
      depth.set(depth.get() - 1);
    }
  }
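The TransformInternalCallable submitted to the executor above is not shown. Its assumed shape (a sketch only): run transformInternal against the proxied reader and writer, and wrap any failure so the submitting thread can unwrap it from the ExecutionException, as the catch block above does.

  private class TransformInternalCallable implements java.util.concurrent.Callable<Object> {
    private final ContentReader reader;
    private final ContentWriter writer;
    private final TransformationOptions options;

    TransformInternalCallable(
        ContentReader reader, ContentWriter writer, TransformationOptions options) {
      this.reader = reader;
      this.writer = writer;
      this.options = options;
    }

    public Object call() throws Exception {
      try {
        transformInternal(reader, writer, options);
        return null;
      } catch (Throwable t) {
        // Wrapped so the caller can distinguish transformer failures (constructor signature assumed).
        throw new TransformInternalCallableException(t);
      }
    }
  }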