private Versions readVersions(VFSLeaf leaf, VFSLeaf fVersions) { if (fVersions == null) { return new NotVersioned(); } try { VFSContainer fVersionContainer = fVersions.getParentContainer(); VersionsFileImpl versions = (VersionsFileImpl) XStreamHelper.readObject(mystream, fVersions); versions.setVersionFile(fVersions); versions.setCurrentVersion((Versionable) leaf); if (versions.getRevisionNr() == null || versions.getRevisionNr().length() == 0) { versions.setRevisionNr(getNextRevisionNr(versions)); } for (VFSRevision revision : versions.getRevisions()) { RevisionFileImpl revisionImpl = (RevisionFileImpl) revision; revisionImpl.setContainer(fVersionContainer); } return versions; } catch (Exception e) { log.warn("This file is not a versions XML file: " + fVersions, e); fVersions.delete(); VersionsFileImpl versions = new VersionsFileImpl(); versions.setCurrentVersion((Versionable) leaf); versions.setVersioned(isVersioned(leaf)); versions.setRevisionNr(getNextRevisionNr(versions)); log.warn("Deleted corrupt version XML file and created new version XML file: " + versions); // the old revisions can not be restored automatically. They are still on disk, you could // recover them // manually. This is not a perfect solution, but at least the user does not get an RS return versions; } }
/**
 * Extracts the text of a PDF document by running the configured external indexer command
 * and letting it write its output into the given buffer file.
 *
 * <p>Only local files can be handled; anything else is skipped with a warning. The external
 * process is given a fixed 3 second budget, after which it is destroyed unconditionally.
 *
 * @param document the PDF leaf to index; must be a {@code LocalFileImpl}
 * @param bufferFile the file the external command writes the extracted text into
 * @throws IOException declared by the interface contract
 * @throws DocumentAccessException declared by the interface contract
 */
@Override
public void extract(VFSLeaf document, File bufferFile) throws IOException, DocumentAccessException {
  if (!(document instanceof LocalFileImpl)) {
    log.warn("Can only index local file");
    return;
  }
  // Command line: <external indexer cmd> <absolute pdf path> <absolute buffer path>
  List<String> cmds = new ArrayList<>();
  cmds.add(searchModule.getPdfExternalIndexerCmd());
  cmds.add(((LocalFileImpl) document).getBasefile().getAbsolutePath());
  cmds.add(bufferFile.getAbsolutePath());

  CountDownLatch doneSignal = new CountDownLatch(1);
  ProcessWorker worker = new ProcessWorker(cmds, doneSignal);
  worker.start();
  try {
    // Wait at most 3 seconds for the external process to signal completion.
    doneSignal.await(3000, TimeUnit.MILLISECONDS);
  } catch (InterruptedException e) {
    log.error("", e);
    // Restore the interrupt status so callers up the stack can still observe it;
    // the original code swallowed the interrupt.
    Thread.currentThread().interrupt();
  }
  // Destroy the process whether it finished, timed out, or we were interrupted.
  worker.destroyProcess();
}
private void loadSelectionKeysFromConfig() { Map<String, String> handlerConfig = cfgFactory.loadConfigForHandler(this); // now "calculate" available year-values for dropdown, according to // handler config if (handlerConfig.containsKey(PROP_FROM) && handlerConfig.containsKey(PROP_TO)) { // we have a valid config int nowYear = Calendar.getInstance().get(Calendar.YEAR); String from = handlerConfig.get(PROP_FROM); String to = handlerConfig.get(PROP_TO); int i_from = 1900; int i_to = 1900; if (from.startsWith("+")) i_from = nowYear + Integer.parseInt(from.substring(1)); else if (from.startsWith("-")) i_from = nowYear - Integer.parseInt(from.substring(1)); else i_from = Integer.parseInt(from); if (to.startsWith("+")) i_to = nowYear + Integer.parseInt(to.substring(1)); else if (to.startsWith("-")) i_to = nowYear - Integer.parseInt(to.substring(1)); else i_to = Integer.parseInt(to); if (i_to < i_from) { logger.warn("wrong config in YearPropertyHandler : to is smaller than from..."); // leave selectionKeys to default selectionKeys = getDefaultYears(); } else { // now fill the array int span = i_to - i_from; if (span > 1000) span = 1000; // just prevent toooooo long dropdown-list ^ selectionKeys = new String[span + 1]; for (int j = 0; j <= span; j++) selectionKeys[j] = String.valueOf(i_from + j); } } }
/**
 * Recomputes the "statistics update job" UI state and pushes it into the velocity
 * container: whether the job is enabled (three checks, all must pass) and, when enabled,
 * its cron expression for display.
 *
 * <p>Enablement is the conjunction of: the Quartz trigger state is neither NONE nor ERROR,
 * the job detail is named "org.olat.statistics.job.enabled", and the
 * StatisticUpdateManager reports itself enabled.
 */
private void refreshUIState() {
  boolean enabled = false;
  String cronExpression = "";
  if (CoreSpringFactory.containsBean("schedulerFactoryBean")) {
    log_.info("refreshUIState: schedulerFactoryBean found");
    Object schedulerFactoryBean = CoreSpringFactory.getBean("schedulerFactoryBean");
    if (schedulerFactoryBean != null && schedulerFactoryBean instanceof Scheduler) {
      Scheduler schedulerBean = (Scheduler) schedulerFactoryBean;
      int triggerState;
      try {
        // Check 1: the Quartz trigger must exist and not be in an error state.
        triggerState = schedulerBean.getTriggerState("updateStatisticsTrigger", null /*trigger group*/);
        enabled = (triggerState != Trigger.STATE_NONE) && (triggerState != Trigger.STATE_ERROR);
        log_.info(
            "refreshUIState: updateStatisticsTrigger state was "
                + triggerState
                + ", enabled now: "
                + enabled);
      } catch (SchedulerException e) {
        // On a scheduler error, 'enabled' stays false from its initialization above.
        log_.warn(
            "refreshUIState: Got a SchedulerException while asking for the updateStatisticsTrigger's state",
            e);
      }
    }
    // Check 2: the trigger's job detail must carry the well-known "enabled" job name.
    // NOTE(review): getBean/cast are unguarded here — presumably the bean always exists
    // when schedulerFactoryBean does; confirm against the Spring config.
    CronTriggerBean triggerBean = (CronTriggerBean) CoreSpringFactory.getBean("updateStatisticsTrigger");
    JobDetail jobDetail = triggerBean.getJobDetail();
    enabled &= jobDetail.getName().equals("org.olat.statistics.job.enabled");
    log_.info("refreshUIState: org.olat.statistics.job.enabled check, enabled now: " + enabled);
    cronExpression = triggerBean.getCronExpression();
    // Check 3: the StatisticUpdateManager must be configured and enabled.
    StatisticUpdateManager statisticUpdateManager = getStatisticUpdateManager();
    if (statisticUpdateManager == null) {
      log_.info("refreshUIState: statisticUpdateManager not configured");
      enabled = false;
    } else {
      enabled &= statisticUpdateManager.isEnabled();
      log_.info("refreshUIState: statisticUpdateManager configured, enabled now: " + enabled);
    }
  } else {
    log_.info("refreshUIState: schedulerFactoryBean not found");
  }
  // Publish the computed state to the view.
  if (enabled) {
    content.contextPut(
        "status",
        getTranslator().translate("statistics.status.enabled", new String[] {cronExpression}));
  } else {
    content.contextPut("status", getTranslator().translate("statistics.status.disabled"));
  }
  content.contextPut("statisticEnabled", enabled);
  recalcLastUpdated();
  updateStatisticUpdateOngoingFlag();
}
/**
 * Runs the search query against the Lucene index and wraps the hits into a
 * {@link SearchResults} object.
 *
 * @return the search results, or {@code null} if any unexpected error occurred
 * @throws ParseException if the query string cannot be parsed
 */
@Override
public SearchResults call() throws ParseException {
  IndexSearcher searcher = null;
  try {
    final boolean debug = log.isDebug();
    // Without an index there is nothing to search; signal service unavailability.
    if (!searchService.existIndex()) {
      log.warn("Index does not exist, can't search for queryString: " + queryString);
      throw new ServiceNotAvailableException("Index does not exist");
    }
    if (debug) {
      log.debug("queryString=" + queryString);
    }
    searcher = searchService.getIndexSearcher();
    BooleanQuery query = searchService.createQuery(queryString, condQueries);
    if (debug) {
      log.debug("query=" + query);
    }
    // Execute the query, measuring how long Lucene takes.
    final long start = System.currentTimeMillis();
    final int maxHits = SearchServiceFactory.getService().getSearchModuleConfig().getMaxHits();
    TopDocs docs = searcher.search(query, maxHits);
    final long queryTime = System.currentTimeMillis() - start;
    if (debug) {
      log.debug("hits.length()=" + docs.totalHits);
    }
    SearchResultsImpl results =
        new SearchResultsImpl(
            searchService.getMainIndexer(),
            searcher,
            docs,
            query,
            searchService.getAnalyzer(),
            identity,
            roles,
            firstResult,
            maxResults,
            doHighlighting,
            false);
    results.setQueryTime(queryTime);
    results.setNumberOfIndexDocuments(docs.totalHits);
    if (debug) {
      log.debug("found=" + docs.totalHits);
    }
    return results;
  } catch (ParseException pex) {
    // Parse problems are part of the contract; let the caller handle them.
    throw pex;
  } catch (Exception e) {
    // Anything else (including the ServiceNotAvailableException above) is logged
    // and reported as "no results".
    log.error("", e);
    return null;
  } finally {
    // Always release the searcher and close the DB session, even on failure.
    searchService.releaseIndexSearcher(searcher);
    DBFactory.getInstance().commitAndCloseSession();
  }
}
/** * [used by Spring] * * @param cronExpression */ public void setCronExpression(String cronExpression) { if (CronExpression.isValidExpression(cronExpression)) { this.cronExpression = cronExpression; } else { if (StringHelper.containsNonWhitespace(cronExpression)) { // was not empty, so someone tried to set someting here, let user know that it was garbage log.warn( "Configured cron expression is not valid::" + cronExpression + " check your search.indexing.cronjob.expression property", null); } this.cronExpression = null; } }
/**
 * Walks down a (possibly merged/named) container hierarchy looking for a writable local
 * folder to which {@code relFilePath} can be written, unwrapping NamedContainerImpl
 * delegates and descending through MergeSource layers as it goes.
 *
 * @param rootDir the container to start from; may be rebound while descending
 * @param relFilePath the path relative to rootDir, expected to start with "/"
 * @param recursionLevel current depth, capped at 20 as an emergency exit
 * @return the local folder plus the remaining relative path, or {@code null} if no
 *     writable local folder can be found
 */
private static ContainerAndFile findWritableRootFolderForRecursion(
    VFSContainer rootDir, String relFilePath, int recursionLevel) {
  recursionLevel++;
  if (recursionLevel > 20) {
    // Emergency exit condition: a directory hierarchy that has more than 20
    // levels? Probably not..
    log.warn(
        "Reached recursion level while finding writable root Folder - most likely a bug. rootDir::"
            + rootDir
            + " relFilePath::"
            + relFilePath);
    return null;
  }
  // Unwrap named containers to get at the real implementation.
  if (rootDir instanceof NamedContainerImpl) {
    rootDir = ((NamedContainerImpl) rootDir).getDelegate();
  }
  if (rootDir instanceof MergeSource) {
    MergeSource mergedDir = (MergeSource) rootDir;
    // first check if the next level is not a second MergeSource
    int stop = relFilePath.indexOf("/", 1);
    if (stop > 0) {
      String nextLevel = extractChild(relFilePath);
      VFSItem item = mergedDir.resolve(nextLevel);
      if (item instanceof NamedContainerImpl) {
        item = ((NamedContainerImpl) item).getDelegate();
      }
      if (item instanceof MergeSource) {
        // Descend directly into the nested merge source, consuming one path segment.
        rootDir = (MergeSource) item;
        relFilePath = relFilePath.substring(stop);
        return findWritableRootFolderForRecursion(rootDir, relFilePath, recursionLevel);
      }
    }
    VFSContainer rootWriteContainer = mergedDir.getRootWriteContainer();
    if (rootWriteContainer == null) {
      // we have a merge source without a write container, try it one higher,
      // go through all children of this one and search the correct child in
      // the path
      List<VFSItem> children = rootDir.getItems();
      if (children.isEmpty()) {
        // ups, a merge source without children, no good, return null
        return null;
      }
      // NOTE(review): if relFilePath contains no "/" after index 0, indexOf returns -1
      // and this substring throws StringIndexOutOfBoundsException — presumably callers
      // always pass a "/"-separated path with at least one segment; confirm.
      String nextChildName = relFilePath.substring(1, relFilePath.indexOf("/", 1));
      for (VFSItem child : children) {
        // look up for the next child in the path
        if (child.getName().equals(nextChildName)) {
          // use this child as new root and remove the child name from the rel path
          if (child instanceof VFSContainer) {
            rootDir = (VFSContainer) child;
            relFilePath = relFilePath.substring(relFilePath.indexOf("/", 1));
            break;
          } else {
            // ups, a merge source with a child that is not a VFSContainer -
            // no good, return null
            return null;
          }
        }
      }
    } else {
      // ok, we found a merge source with a write container
      rootDir = rootWriteContainer;
    }
  }
  if (rootDir != null && rootDir instanceof LocalFolderImpl) {
    // finished, we found a local folder we can use to write
    return new ContainerAndFile(rootDir, relFilePath);
  } else {
    // do recursion
    return findWritableRootFolderForRecursion(rootDir, relFilePath, recursionLevel);
  }
}