/**
 * Deletes all structured/default portfolio maps that other users shared with {@code ident2},
 * periodically closing the DB session to keep memory usage bounded during bulk deletion.
 */
private void deleteMaps() {
  final List<PortfolioStructure> sharedMaps =
      epFrontendManager.getStructureElementsFromOthers(
          ident2, null, ElementType.STRUCTURED_MAP, ElementType.DEFAULT_MAP);
  int deletionCounter = 1;
  for (final PortfolioStructure sharedMap : sharedMaps) {
    deletionCounter++;
    epFrontendManager.deletePortfolioStructure(sharedMap);
    // release the session every 100 deletions so the persistence context stays small
    if (deletionCounter % 100 == 0) {
      DBFactory.getInstance().closeSession();
    }
  }
}
private void internalTestManyArtefactCreation(int artefactAmount) { long start = System.currentTimeMillis(); Runtime r = Runtime.getRuntime(); for (int j = 0; j < artefactAmount; j++) { AbstractArtefact artefact = createAndFillArtefact(j); // tag the artefacts if (j % 2 == 0) { epFrontendManager.setArtefactTags(ident1, artefact, tagList1); } else { epFrontendManager.setArtefactTags(ident1, artefact, tagList2); } if (j % 10 == 0) { DBFactory.getInstance().closeSession(); } if (j % 100 == 0) { logger.info("created another 100 artefacts! -> " + j); logger.info(" free memory: " + r.freeMemory()); } } // for // load the whole artefact list long now = System.currentTimeMillis(); logger.info("created " + artefactAmount + " artefacts in: " + (now - start) + " ms."); start = System.currentTimeMillis(); List<AbstractArtefact> artList = epFrontendManager.getArtefactPoolForUser(ident1); now = System.currentTimeMillis(); logger.info("querying all of them took: " + (now - start) + " ms."); assertEquals(artList.size(), artefactAmount); // filter artefacts by tags EPFilterSettings filterSettings = new EPFilterSettings(); filterSettings.setTagFilter(new ArrayList<String>(Arrays.asList("Schule"))); start = System.currentTimeMillis(); artList = epFrontendManager.filterArtefactsByFilterSettings( filterSettings, ident1, new Roles(false, false, false, false, false, false, false)); now = System.currentTimeMillis(); logger.info("filter artefacts by one tag took: " + (now - start) + " ms."); assertEquals(artList.size(), artefactAmount / 2); filterSettings.setTagFilter(tagList1); start = System.currentTimeMillis(); artList = epFrontendManager.filterArtefactsByFilterSettings( filterSettings, ident1, new Roles(false, false, false, false, false, false, false)); now = System.currentTimeMillis(); logger.info("filter artefacts by tagList1 took: " + (now - start) + " ms."); assertEquals(artList.size(), artefactAmount / 2); }
/**
 * Runs the Lucene query against the search index and wraps the hits in a
 * {@link SearchResultsImpl}. Parse errors are rethrown to the caller; any other
 * failure is logged and answered with {@code null}. The index searcher is always
 * released and the DB session committed and closed, even on failure.
 *
 * @return the search results, or {@code null} if an unexpected error occurred
 * @throws ParseException if the query string cannot be parsed
 */
@Override public SearchResults call() throws ParseException {
  IndexSearcher indexSearcher = null;
  try {
    final boolean debugEnabled = log.isDebug();
    if (!searchService.existIndex()) {
      log.warn("Index does not exist, can't search for queryString: " + queryString);
      throw new ServiceNotAvailableException("Index does not exist");
    }
    if (debugEnabled) {
      log.debug("queryString=" + queryString);
    }
    indexSearcher = searchService.getIndexSearcher();
    final BooleanQuery booleanQuery = searchService.createQuery(queryString, condQueries);
    if (debugEnabled) {
      log.debug("query=" + booleanQuery);
    }
    final long queryStart = System.currentTimeMillis();
    final int maxHits = SearchServiceFactory.getService().getSearchModuleConfig().getMaxHits();
    final TopDocs topDocs = indexSearcher.search(booleanQuery, maxHits);
    final long queryTime = System.currentTimeMillis() - queryStart;
    if (debugEnabled) {
      log.debug("hits.length()=" + topDocs.totalHits);
    }
    final SearchResultsImpl results =
        new SearchResultsImpl(
            searchService.getMainIndexer(), indexSearcher, topDocs, booleanQuery,
            searchService.getAnalyzer(), identity, roles, firstResult, maxResults,
            doHighlighting, false);
    results.setQueryTime(queryTime);
    results.setNumberOfIndexDocuments(topDocs.totalHits);
    if (debugEnabled) {
      log.debug("found=" + topDocs.totalHits);
    }
    return results;
  } catch (ParseException pex) {
    // invalid query syntax: let the caller report it to the user
    throw pex;
  } catch (Exception ex) {
    log.error("", ex);
    return null;
  } finally {
    searchService.releaseIndexSearcher(indexSearcher);
    DBFactory.getInstance().commitAndCloseSession();
  }
}
/**
 * Best-effort deletion of the given identities. A failed deletion is recorded in
 * {@code errors} (by user name) and logged, but does not stop the loop. After every
 * attempt — successful or not — an intermediate DB commit is issued.
 *
 * @param identities the identities to delete
 * @param errors collects the names of identities whose deletion failed
 */
private void deleteIdentities(List<Identity> identities, List<String> errors) {
  for (Identity identity : identities) {
    try {
      UserDeletionManager.getInstance().deleteIdentity(identity);
    } catch (Exception deleteEx) {
      errors.add(identity.getName());
      logError("", deleteEx);
    } finally {
      // commit per identity so one failure cannot roll back earlier deletions
      try {
        DBFactory.getInstance().intermediateCommit();
      } catch (Exception commitEx) {
        logError("", commitEx);
      }
    }
  }
}
/**
 * Scans all QTI test and survey resources and probes each one via
 * {@link OnyxModule#isOnyxTest}. Logs the total scan duration in milliseconds.
 */
@Override
public void run() {
  final long startNanos = System.nanoTime();
  log.info("Start scanning for QTI resources");

  final List<String> resourceTypes = new ArrayList<>(2);
  resourceTypes.add(TestFileResource.TYPE_NAME);
  resourceTypes.add(SurveyFileResource.TYPE_NAME);
  final List<OLATResource> qtiResources =
      CoreSpringFactory.getImpl(OLATResourceManager.class).findResourceByTypes(resourceTypes);
  // release the session before the (potentially long) scan loop
  DBFactory.getInstance().commitAndCloseSession();

  for (final OLATResource qtiResource : qtiResources) {
    OnyxModule.isOnyxTest(qtiResource);
  }
  log.info(
      qtiResources.size() + " QTI Resources scanned in (ms): " + CodeHelper.nanoToMilliTime(startNanos));
}
/**
 * Applies one bulk-assessment run to every row in {@code datas}: updates user comments,
 * scores, passed flags, and (for TA/GTA nodes) processes returned files and pushes the
 * GTA task state forward. Any row without a resolvable identity key is reported via
 * {@code feedbacks} and skipped. Commits to the DB every 5 rows (full close) and on
 * every other row (plain commit) to keep the session small.
 *
 * @param feedbacks collects per-row feedback messages (e.g. unknown user)
 */
private void doProcess(List<BulkAssessmentFeedback> feedbacks) {
  final DB dbInstance = DBFactory.getInstance();
  final BaseSecurity securityManager = CoreSpringFactory.getImpl(BaseSecurity.class);
  final Identity coachIdentity = securityManager.loadIdentityByKey(coachedIdentity);
  final ICourse course = CourseFactory.loadCourse(courseRes);
  final AssessableCourseNode courseNode = getCourseNode();
  final Roles studentRoles = new Roles(false, false, false, false, false, false, false, false);

  // Which aspects of the assessment are configured on this course node.
  final boolean hasUserComment = courseNode.hasCommentConfigured();
  final boolean hasScore = courseNode.hasScoreConfigured();
  final boolean hasPassed = courseNode.hasPassedConfigured();
  // Return files are only supported for task-based node types (TA / GTA).
  final boolean hasReturnFiles =
      (StringHelper.containsNonWhitespace(datas.getReturnFiles())
          && (courseNode instanceof TACourseNode || courseNode instanceof GTACourseNode));

  if (hasReturnFiles) {
    // Unzip the uploaded return-files archive into a fresh temp directory; the
    // per-identity subfolders are looked up later by assessed id. Failure here is
    // logged but not fatal — rows simply won't find their return folder.
    try {
      OlatRootFileImpl returnFilesZipped = new OlatRootFileImpl(datas.getReturnFiles(), null);
      String tmp = FolderConfig.getCanonicalTmpDir();
      unzipped = new File(tmp, UUID.randomUUID().toString() + File.separatorChar);
      unzipped.mkdirs();
      ZipUtil.unzip(returnFilesZipped.getBasefile(), unzipped);
    } catch (Exception e) {
      log.error("Cannot unzip the return files during bulk assessment", e);
    }
  }

  // Score bounds and pass cut value, only loaded when the node configures them.
  Float min = null;
  Float max = null;
  Float cut = null;
  if (hasScore) {
    min = courseNode.getMinScoreConfiguration();
    max = courseNode.getMaxScoreConfiguration();
  }
  if (hasPassed) {
    cut = courseNode.getCutValueConfiguration();
  }

  int count = 0;
  List<BulkAssessmentRow> rows = datas.getRows();
  for (BulkAssessmentRow row : rows) {
    Long identityKey = row.getIdentityKey();
    if (identityKey == null) {
      feedbacks.add(new BulkAssessmentFeedback("bulk.action.no.such.user", row.getAssessedId()));
      continue; // nothing to do
    }
    Identity identity = securityManager.loadIdentityByKey(identityKey);
    IdentityEnvironment ienv = new IdentityEnvironment(identity, studentRoles);
    UserCourseEnvironment uce =
        new UserCourseEnvironmentImpl(ienv, course.getCourseEnvironment());

    // update comment, empty string will reset comment
    String userComment = row.getComment();
    if (hasUserComment && userComment != null) {
      // Update userComment in db
      courseNode.updateUserUserComment(userComment, uce, coachIdentity);
      // LD: why do we have to update the efficiency statement?
      // EfficiencyStatementManager esm = EfficiencyStatementManager.getInstance();
      // esm.updateUserEfficiencyStatement(uce);
    }

    // update score
    Float score = row.getScore();
    if (hasScore && score != null) {
      // score < minimum score: silently skipped (no feedback is currently emitted)
      if ((min != null && score.floatValue() < min.floatValue())
          || (score.floatValue() < AssessmentHelper.MIN_SCORE_SUPPORTED)) {
        // "bulk.action.lessThanMin";
      }
      // score > maximum score: likewise silently skipped
      else if ((max != null && score.floatValue() > max.floatValue())
          || (score.floatValue() > AssessmentHelper.MAX_SCORE_SUPPORTED)) {
        // "bulk.action.greaterThanMax";
      } else {
        // score between minimum and maximum score: derive passed from the cut value
        // when one is configured, otherwise leave passed undecided
        ScoreEvaluation se;
        if (hasPassed && cut != null) {
          Boolean passed = (score.floatValue() >= cut.floatValue()) ? Boolean.TRUE : Boolean.FALSE;
          se = new ScoreEvaluation(score, passed);
        } else {
          se = new ScoreEvaluation(score, null);
        }
        // Update score,passed properties in db, and the user's efficiency statement
        courseNode.updateUserScoreEvaluation(se, uce, coachIdentity, false);
      }
    }

    // An explicit passed flag from the row is only applied when there is no cut value
    // (i.e. passed is set manually by the tutor, not derived from the score).
    Boolean passed = row.getPassed();
    if (hasPassed && passed != null && cut == null) {
      // Configuration of manual assessment --> Display passed/not passed: yes,
      // Type of display: Manual by tutor
      ScoreEvaluation seOld = courseNode.getUserScoreEvaluation(uce);
      Float oldScore = seOld.getScore();
      ScoreEvaluation se = new ScoreEvaluation(oldScore, passed);
      // Update score,passed properties in db, and the user's efficiency statement
      boolean incrementAttempts = false;
      courseNode.updateUserScoreEvaluation(se, uce, coachIdentity, incrementAttempts);
    }

    // Hand back return files when a folder for this assessed id exists in the unzipped archive.
    boolean identityHasReturnFile = false;
    if (hasReturnFiles && row.getReturnFiles() != null && row.getReturnFiles().size() > 0) {
      String assessedId = row.getAssessedId();
      File assessedFolder = new File(unzipped, assessedId);
      identityHasReturnFile = assessedFolder.exists();
      if (identityHasReturnFile) {
        processReturnFile(courseNode, row, uce, assessedFolder);
      }
    }

    if (courseNode instanceof GTACourseNode) {
      // push the state further
      GTACourseNode gtaNode = (GTACourseNode) courseNode;
      if ((hasScore && score != null) || (hasPassed && passed != null)) {
        // pushed to graded
        updateTasksState(gtaNode, uce, TaskProcess.grading);
      } else if (hasReturnFiles) {
        // push to revised
        updateTasksState(gtaNode, uce, TaskProcess.correction);
      }
    }

    // Commit cadence: full session close every 5th row, plain commit otherwise.
    if (count++ % 5 == 0) {
      dbInstance.commitAndCloseSession();
    } else {
      dbInstance.commit();
    }
  }
}
/** * Internal helper to deal with the handbrake console output and update the transcoding metadata * * @param proc * @param videoTranscoding * @param transcodedFile * @return true: everything fine; false: an error happended somewhere */ private final boolean updateVideoTranscodingFromProcessOutput( Process proc, VideoTranscoding videoTranscoding, File transcodedFile) { VideoManager videoManager = CoreSpringFactory.getImpl(VideoManager.class); StringBuilder errors = new StringBuilder(); StringBuilder output = new StringBuilder(); String line; // Read from standard input and parse percentages of transcoding process InputStream stdout = proc.getInputStream(); InputStreamReader isr = new InputStreamReader(stdout); BufferedReader br = new BufferedReader(isr); line = null; try { while ((line = br.readLine()) != null) { output.append(line); // Parse the percentage. Logline looks like this: // Encoding: task 1 of 1, 85.90 % (307.59 fps, avg 330.35 fps, ETA 00h00m05s) int start = line.indexOf(","); if (start != -1) { line = line.substring(start); int end = line.indexOf("."); if (end != -1 && end < 5) { String percent = line.substring(2, end); log.debug("Output: " + percent); // update version file for UI try { videoTranscoding.setStatus(Integer.parseInt(percent)); videoTranscoding = videoManager.updateVideoTranscoding(videoTranscoding); DBFactory.getInstance().commitAndCloseSession(); } catch (ObjectDeletedException e) { // deleted by other process proc.destroy(); br.close(); return false; } } } } } catch (IOException e) { // } finally { try { stdout.close(); isr.close(); br.close(); } catch (Exception e2) { // ignore } } // Read and ignore errors, Handbrake outputs a lot info on startup. 
Only // display errors in debug level InputStream stderr = proc.getErrorStream(); InputStreamReader iserr = new InputStreamReader(stderr); BufferedReader berr = new BufferedReader(iserr); line = null; try { while ((line = berr.readLine()) != null) { errors.append(line); log.debug("Error: " + line); } } catch (IOException e) { // } finally { try { stderr.close(); iserr.close(); berr.close(); } catch (Exception e2) { // ignore } } try { // On finish, update metadata file int exitValue = proc.waitFor(); if (exitValue == 0) { MovieService movieService = CoreSpringFactory.getImpl(MovieService.class); Size videoSize = movieService.getSize(new LocalFileImpl(transcodedFile), VideoManagerImpl.FILETYPE_MP4); videoTranscoding.setWidth(videoSize.getWidth()); videoTranscoding.setHeight(videoSize.getHeight()); videoTranscoding.setSize(transcodedFile.length()); videoTranscoding.setStatus(VideoTranscoding.TRANSCODING_STATUS_DONE); videoTranscoding = videoManager.updateVideoTranscoding(videoTranscoding); DBFactory.getInstance().commitAndCloseSession(); return true; } return false; } catch (InterruptedException e) { return false; } }