public void testImmediateRemoval() throws Exception {
  eagerCleaner.setEagerOrphanCleanup(false);

  final StoreRef storeRef = nodeService.createStore("test", getName() + "-" + GUID.generate());
  RetryingTransactionCallback<ContentData> testCallback =
      new RetryingTransactionCallback<ContentData>() {
        public ContentData execute() throws Throwable {
          // Create some content
          NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
          Map<QName, Serializable> properties = new HashMap<QName, Serializable>(13);
          properties.put(ContentModel.PROP_NAME, (Serializable) "test.txt");
          NodeRef contentNodeRef =
              nodeService
                  .createNode(
                      rootNodeRef,
                      ContentModel.ASSOC_CHILDREN,
                      ContentModel.ASSOC_CHILDREN,
                      ContentModel.TYPE_CONTENT,
                      properties)
                  .getChildRef();
          ContentWriter writer =
              contentService.getWriter(contentNodeRef, ContentModel.PROP_CONTENT, true);
          writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
          writer.putContent("INITIAL CONTENT");
          ContentData contentData = writer.getContentData();
          // Delete the node, leaving its content orphaned
          nodeService.deleteNode(contentNodeRef);
          // Done
          return contentData;
        }
      };
  ContentData contentData =
      transactionService.getRetryingTransactionHelper().doInTransaction(testCallback);

  // Make sure that the content URL still exists
  ContentReader reader = contentService.getRawReader(contentData.getContentUrl());
  assertNotNull(reader);
  assertTrue("Content should not have been eagerly deleted.", reader.exists());

  // Fire the cleaner
  cleaner.setProtectDays(0);
  cleaner.execute();

  reader = contentService.getRawReader(contentData.getContentUrl());
  // The content should have disappeared as it is no longer referenced in the database
  assertFalse("Unprotected content was not deleted", reader.exists());
  assertTrue("Content listener was not called", deletedUrls.contains(reader.getContentUrl()));
}
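// The assertion on deletedUrls above depends on a cleaner listener that is registered outside
// this excerpt. The sketch below shows one way such a listener could be wired up in test
// set-up. It is an illustration only: it assumes a ContentStoreCleanerListener callback of the
// form beforeDelete(ContentStore, String) and a setListeners(List) property on the cleaner, so
// the names and signatures should be checked against the class under test.
private void registerDeletedUrlListener() {
  ContentStoreCleanerListener listener =
      new ContentStoreCleanerListener() {
        public void beforeDelete(ContentStore sourceStore, String contentUrl)
            throws ContentIOException {
          // Record every content URL the cleaner is about to remove
          deletedUrls.add(contentUrl);
        }
      };
  cleaner.setListeners(Collections.singletonList(listener));
}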
/**
 * @see
 *     org.alfresco.repo.content.transform.ContentTransformer#transform(org.alfresco.service.cmr.repository.ContentReader,
 *     org.alfresco.service.cmr.repository.ContentWriter,
 *     org.alfresco.service.cmr.repository.TransformationOptions)
 */
public final void transform(
    ContentReader reader, ContentWriter writer, TransformationOptions options)
    throws ContentIOException {
  try {
    depth.set(depth.get() + 1);

    // Begin timing
    long before = System.currentTimeMillis();

    String sourceMimetype = reader.getMimetype();
    String targetMimetype = writer.getMimetype();

    // Check the options map
    if (options == null) {
      options = new TransformationOptions();
    }

    try {
      if (transformerDebug.isEnabled()) {
        transformerDebug.pushTransform(
            this,
            reader.getContentUrl(),
            sourceMimetype,
            targetMimetype,
            reader.getSize(),
            options);
      }

      // Check the transformability
      checkTransformable(reader, writer, options);

      // Pass on any limits to the reader
      setReaderLimits(reader, writer, options);

      // Transform
      // MNT-12238: CLONE - CLONE - Upload of PPTX causes very high memory usage leading to
      // system instability
      // Limit the transformation to the configured number of milliseconds to avoid very high
      // RAM consumption and OOM errors when transforming problematic documents
      TransformationOptionLimits limits =
          getLimits(reader.getMimetype(), writer.getMimetype(), options);
      long timeoutMs = (limits == null) ? -1 : limits.getTimeoutMs();
      if (!useTimeoutThread || (-1 == timeoutMs)) {
        transformInternal(reader, writer, options);
      } else {
        Future<?> submittedTask = null;
        StreamAwareContentReaderProxy proxiedReader = new StreamAwareContentReaderProxy(reader);
        StreamAwareContentWriterProxy proxiedWriter = new StreamAwareContentWriterProxy(writer);
        try {
          submittedTask =
              getExecutorService()
                  .submit(new TransformInternalCallable(proxiedReader, proxiedWriter, options));
          submittedTask.get(timeoutMs + additionalThreadTimout, TimeUnit.MILLISECONDS);
        } catch (TimeoutException e) {
          releaseResources(submittedTask, proxiedReader, proxiedWriter);
          throw new TimeoutException("Transformation failed due to timeout limit");
        } catch (InterruptedException e) {
          releaseResources(submittedTask, proxiedReader, proxiedWriter);
          throw new InterruptedException(
              "Transformation failed because the transformation thread was interrupted");
        } catch (ExecutionException e) {
          Throwable cause = e.getCause();
          if (cause instanceof TransformInternalCallableException) {
            cause = ((TransformInternalCallableException) cause).getCause();
          }
          throw cause;
        }
      }

      // Record the time taken
      long after = System.currentTimeMillis();
      recordTime(sourceMimetype, targetMimetype, after - before);
    } catch (ContentServiceTransientException cste) {
      // A transient failure has occurred within the content transformer.
      // This should not be interpreted as a failure and therefore we should not
      // update the transformer's average time.
      if (logger.isDebugEnabled()) {
        logger.debug(
            "Transformation has been transiently declined: \n"
                + " reader: " + reader + "\n"
                + " writer: " + writer + "\n"
                + " options: " + options + "\n"
                + " transformer: " + this);
      }
      // The finally block below will still perform the tidy-up; rethrow the exception
      throw cste;
    } catch (UnsupportedTransformationException e) {
      // Don't record an error or even the time, as this is normal in compound transformations.
      transformerDebug.debug(" Failed", e);
      throw e;
    } catch (Throwable e) {
      // Make sure that this transformation gets penalised in terms of time taken.
      // This ensures that, when transformers compete for the same transformation, those that
      // tend to fail are deprioritised.
      long after = System.currentTimeMillis();
      recordError(sourceMimetype, targetMimetype, after - before);

      // Ask Tika to detect the document type and report whether the claimed mime type is
      // plausible
      String differentType = getMimetypeService().getMimetypeIfNotMatches(reader.getReader());

      // Report the error
      if (differentType == null) {
        transformerDebug.debug(" Failed", e);
        throw new ContentIOException(
            "Content conversion failed: \n"
                + " reader: " + reader + "\n"
                + " writer: " + writer + "\n"
                + " options: " + options.toString(false) + "\n"
                + " limits: " + getLimits(reader, writer, options),
            e);
      } else {
        transformerDebug.debug(" Failed: Mime type was '" + differentType + "'", e);
        if (this.retryTransformOnDifferentMimeType) {
          // MNT-11015 fix.
          // Get a new reader to refresh the input stream.
          reader = reader.getReader();
          // Set the actual MIME type detected by Tika on the content reader
          reader.setMimetype(differentType);
          // Look up the correct transformer for the detected MIME type and retry the
          // transformation with it
          ContentTransformer transformer =
              this.registry.getTransformer(
                  differentType, reader.getSize(), targetMimetype, options);
          if (null != transformer) {
            transformer.transform(reader, writer, options);
          } else {
            transformerDebug.debug(" Failed", e);
            throw new ContentIOException(
                "Content conversion failed: \n"
                    + " reader: " + reader + "\n"
                    + " writer: " + writer + "\n"
                    + " options: " + options.toString(false) + "\n"
                    + " limits: " + getLimits(reader, writer, options) + "\n"
                    + " claimed mime type: " + reader.getMimetype() + "\n"
                    + " detected mime type: " + differentType + "\n"
                    + " transformer not found" + "\n",
                e);
          }
        } else {
          throw new ContentIOException(
              "Content conversion failed: \n"
                  + " reader: " + reader + "\n"
                  + " writer: " + writer + "\n"
                  + " options: " + options.toString(false) + "\n"
                  + " limits: " + getLimits(reader, writer, options) + "\n"
                  + " claimed mime type: " + reader.getMimetype() + "\n"
                  + " detected mime type: " + differentType,
              e);
        }
      }
    } finally {
      transformerDebug.popTransform();

      // Check that the reader and writer are both closed
      if (reader.isChannelOpen()) {
        logger.error(
            "Content reader not closed by transformer: \n"
                + " reader: " + reader + "\n"
                + " transformer: " + this);
      }
      if (writer.isChannelOpen()) {
        logger.error(
            "Content writer not closed by transformer: \n"
                + " writer: " + writer + "\n"
                + " transformer: " + this);
      }
    }

    // Done
    if (logger.isDebugEnabled()) {
      logger.debug(
          "Completed transformation: \n"
              + " reader: " + reader + "\n"
              + " writer: " + writer + "\n"
              + " options: " + options + "\n"
              + " transformer: " + this);
    }
  } finally {
    depth.set(depth.get() - 1);
  }
}
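// For reference, a minimal, self-contained sketch (not part of the original class) of the
// bounded-execution pattern used in transform() above: the work is handed to an
// ExecutorService and the caller waits at most timeoutMs milliseconds on the Future; on
// timeout the task is cancelled so the worker thread is interrupted rather than left running.
// Only standard java.util.concurrent types are used; the executor and the Callable are
// illustrative stand-ins, not the class's actual fields.
private static <T> T runWithTimeout(ExecutorService executor, Callable<T> work, long timeoutMs)
    throws Exception {
  Future<T> future = executor.submit(work);
  try {
    // Wait for the result, but no longer than the configured limit
    return future.get(timeoutMs, TimeUnit.MILLISECONDS);
  } catch (TimeoutException e) {
    // Try to stop the in-flight work before propagating the timeout
    future.cancel(true);
    throw new TimeoutException("Work did not complete within " + timeoutMs + "ms");
  } catch (ExecutionException e) {
    // Unwrap the underlying failure thrown by the Callable
    Throwable cause = e.getCause();
    throw (cause instanceof Exception) ? (Exception) cause : e;
  }
}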