/**
 * Refreshes the entitlement pools of every owner associated with the target product.
 *
 * <p>The product id is read from the merged JobDataMap under {@code JobStatus.TARGET_ID};
 * {@code LAZY_REGEN} controls whether regeneration is deferred.
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
  String productId = context.getMergedJobDataMap().getString(JobStatus.TARGET_ID);
  Boolean lazy = context.getMergedJobDataMap().getBoolean(LAZY_REGEN);

  // Owner lookup takes a list of product ids; we query for just this one.
  List<String> productIds = new ArrayList<String>();
  productIds.add(productId);

  for (Owner owner : subAdapter.lookupOwnersByProduct(productIds)) {
    poolManager.refreshPools(owner, lazy);
  }

  context.setResult("Pools refreshed for product " + productId);
}
/**
 * Polls the checklist-notifications feed for a research object and stores the parsed feed as
 * the job result.
 *
 * <p>Joins an already-active Hibernate transaction if one exists; otherwise begins one and
 * commits it in the {@code finally} block. Does nothing when no notification endpoint is
 * configured. Feed-retrieval failures are logged (with cause) rather than rethrown.
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
  if (checklistNotificationsUri == null) {
    return;
  }
  // "started" is true when WE opened the transaction and are responsible for committing it.
  boolean started = !HibernateUtil.getSessionFactory().getCurrentSession().getTransaction().isActive();
  if (started) {
    HibernateUtil.getSessionFactory().getCurrentSession().getTransaction().begin();
  }
  try {
    URI researchObjectUri = (URI) context.getMergedJobDataMap().get(RESEARCH_OBJECT_URI);
    SyndFeedInput input = new SyndFeedInput();
    URI requestedUri = createQueryUri(getTheLastFeedDate(researchObjectUri), researchObjectUri);
    try {
      context.setResult(input.build(new XmlReader(requestedUri.toURL())));
    } catch (IllegalArgumentException | FeedException | IOException e) {
      // Fix: previously the exception was dropped, making failures undiagnosable.
      LOGGER.error("Can't get the feed " + requestedUri.toString(), e);
    }
  } finally {
    if (started) {
      HibernateUtil.getSessionFactory().getCurrentSession().getTransaction().commit();
    }
  }
}
/**
 * Verifies that PollJob pulls its collaborators from the merged JobDataMap and scheduler
 * context, polls the measurement service, forwards the capability value to the listener, and
 * tags the value with the measurement-definition id.
 */
@Test
public void doTest() throws SchedulerException, MeasurementException {
  PollJob job = new PollJob();
  JobExecutionContext context = mock(JobExecutionContext.class);
  // Parameterized instead of raw Map to avoid unchecked warnings.
  Map<String, Object> params = new HashMap<String, Object>();
  JobDataMap jobMap = new JobDataMap(params);
  MeasurementDefinition def = MocksFactory.createMockMeasurementDefinition();
  MeasurementListener listener = mock(MeasurementListener.class);
  Scheduler scheduler = mock(Scheduler.class);
  CoreMeasurementService service = mock(CoreMeasurementService.class);
  SchedulerContext schedulerContext =
      new SchedulerContext(Collections.singletonMap(PollJob.MEASUREMENT_SERVICE_ATTR_NAME, service));
  jobMap.put(PollJob.LISTENER_ATTR_NAME, listener);
  jobMap.put(PollJob.MEASUREMENT_DEF_ATTR_NAME, def);
  jobMap.put(PollJob.MEASUREMENT_SERVICE_ATTR_NAME, service);
  when(context.getMergedJobDataMap()).thenReturn(jobMap);
  when(context.getScheduler()).thenReturn(scheduler);
  when(scheduler.getContext()).thenReturn(schedulerContext);
  CapabilityValue capValue = new CapabilityValue(RandomUtils.nextLong());
  when(service.getCapabilityValue(Matchers.<String>any(), Matchers.<String>any()))
      .thenReturn(capValue);
  job.execute(context);
  // The job must read its config, poll the service, and notify the listener.
  verify(context).getMergedJobDataMap();
  verify(service).getCapabilityValue(def.getResourceUri(), def.getCapabilityUri());
  verify(listener).newCapabilityValue(capValue);
  assertEquals(capValue.getMetricsId(), def.getId());
}
/**
 * Logs the job key, fire time, and trigger, then logs every entry of the merged JobDataMap.
 * When the map is non-empty the job result is set to {@code "hello"}.
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
  jobkey = context.getJobDetail().getKey();
  // Fix: log-message typo "Excuting" -> "Executing".
  log.info(
      "Executing job: "
          + jobkey
          + " executing at "
          + new Date()
          + " fire by: "
          + context.getTrigger().getKey());
  if (context.getMergedJobDataMap().size() > 0) {
    Set<String> keys = context.getMergedJobDataMap().keySet();
    for (String key : keys) {
      String value = context.getMergedJobDataMap().getString(key);
      log.info(" jobdatamap entry: " + key + " = " + value);
    }
    context.setResult("hello");
  }
}
/**
 * Bridges a Quartz trigger to a Spring Batch job launch: resolves the batch job named in the
 * merged JobDataMap, translates the map into JobParameters, and launches it. Launch failures
 * are logged, not rethrown.
 */
@Override
protected void executeInternal(JobExecutionContext context) {
  Map<String, Object> dataMap = context.getMergedJobDataMap();
  String batchJobName = (String) dataMap.get(JOB_NAME);
  log.info("Quartz trigger firing with Spring Batch jobName=" + batchJobName);
  JobParameters params = getJobParametersFromJobMap(dataMap);
  try {
    jobLauncher.run(jobLocator.getJob(batchJobName), params);
  } catch (JobExecutionException e) {
    log.error("Could not execute job.", e);
  }
}
/**
 * Resolves the Spring Batch job named in the merged JobDataMap, merges the configured job
 * parameters via {@code translateParams}, and launches it. Any failure is logged, not rethrown.
 */
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
  Map<String, Object> dataMap = context.getMergedJobDataMap();
  String batchJobName = (String) dataMap.get(JOB_NAME);
  try {
    Job job = jobLocator.getJob(batchJobName);
    JobParameters mergedParams = translateParams(job, jobParameters);
    jobLauncher.run(job, mergedParams);
  } catch (Exception e) {
    logger.error("Could not execute job.", e);
  }
}
/** @see org.quartz.Job#execute(org.quartz.JobExecutionContext) */ public void execute(JobExecutionContext context) throws JobExecutionException { JobDataMap mergedJobDataMap = context.getMergedJobDataMap(); SchedulerContext schedCtxt = null; try { schedCtxt = context.getScheduler().getContext(); } catch (SchedulerException e) { throw new JobExecutionException("Error obtaining scheduler context.", e, false); } String fileName = mergedJobDataMap.getString(FILE_NAME); String listenerName = mergedJobDataMap.getString(FILE_SCAN_LISTENER_NAME); if (fileName == null) { throw new JobExecutionException( "Required parameter '" + FILE_NAME + "' not found in merged JobDataMap"); } if (listenerName == null) { throw new JobExecutionException( "Required parameter '" + FILE_SCAN_LISTENER_NAME + "' not found in merged JobDataMap"); } FileScanListener listener = (FileScanListener) schedCtxt.get(listenerName); if (listener == null) { throw new JobExecutionException( "FileScanListener named '" + listenerName + "' not found in SchedulerContext"); } long lastDate = -1; if (mergedJobDataMap.containsKey(LAST_MODIFIED_TIME)) { lastDate = mergedJobDataMap.getLong(LAST_MODIFIED_TIME); } long newDate = getLastModifiedDate(fileName); if (newDate < 0) { log.warn("File '" + fileName + "' does not exist."); return; } if (lastDate > 0 && (newDate != lastDate)) { // notify call back... log.info("File '" + fileName + "' updated, notifying listener."); listener.fileUpdated(fileName); } else if (log.isDebugEnabled()) { log.debug("File '" + fileName + "' unchanged."); } // It is the JobDataMap on the JobDetail which is actually stateful context.getJobDetail().getJobDataMap().put(LAST_MODIFIED_TIME, newDate); }
/**
 * Gets the next job, if any, configured via the jobDataAsMap property, instantiates it
 * reflectively, and executes it with the current execution context. Reflection failures are
 * logged and swallowed so the current job's result is unaffected.
 */
protected void executeNextJob(JobExecutionContext jobExecutionContext)
    throws JobExecutionException {
  String[] keys = jobExecutionContext.getMergedJobDataMap().getKeys();
  log.info("ChainedJobBean - keys: " + keys.length);
  if (hasNextJob(keys)) {
    String nextJobClassName =
        (String) jobExecutionContext.getMergedJobDataMap().get(JOB_DATA_MAP_NEXT_JOB_KEY);
    try {
      Class<?> theClass = Class.forName(nextJobClassName);
      // Class.newInstance() is deprecated and propagates undeclared checked exceptions;
      // invoke the no-arg constructor explicitly instead.
      QuartzJobBean quartzJobBean = (QuartzJobBean) theClass.getDeclaredConstructor().newInstance();
      log.info("ChainedJobBean - execute next job");
      quartzJobBean.execute(jobExecutionContext);
    } catch (ReflectiveOperationException e) {
      // Covers ClassNotFound, NoSuchMethod, Instantiation, IllegalAccess, InvocationTarget.
      log.error(e);
    }
  }
}
/**
 * Asynchronously generates and stores a manifest for the target consumer, recording the
 * {@code ExportResult} as the job result. Any failure is wrapped in a non-refiring
 * {@link JobExecutionException}.
 */
@Override
public void toExecute(JobExecutionContext context) throws JobExecutionException {
  JobDataMap dataMap = context.getMergedJobDataMap();
  String consumerUuid = dataMap.getString(JobStatus.TARGET_ID);
  String cdnLabel = dataMap.getString(CDN_LABEL);
  String webAppPrefix = dataMap.getString(WEBAPP_PREFIX);
  String apiUrl = dataMap.getString(API_URL);
  @SuppressWarnings("unchecked")
  Map<String, String> extensionData = (Map<String, String>) dataMap.get(EXTENSION_DATA);

  log.info("Starting async export for {}", consumerUuid);
  try {
    ExportResult result =
        manifestManager.generateAndStoreManifest(
            consumerUuid, cdnLabel, webAppPrefix, apiUrl, extensionData);
    context.setResult(result);
    log.info("Async export complete.");
  } catch (Exception e) {
    throw new JobExecutionException(e.getMessage(), e, false);
  }
}
/** @throws java.lang.Exception */
@Before
public void setUp() throws Exception {
  // Fresh job under test, wired with mocked collaborators.
  job = new NotificationEventCreationJob();
  job.setConfig(config);
  job.setEndPoint(END_POINT);
  job.setIdBuilder(idBuilder);
  job.setIssuer(issuer);
  job.setProducer(producer);
  // Exception instance reused by failure-path tests.
  exception = new RuntimeException("Nuts!");
  // InOrder verifier spanning every mocked collaborator.
  inOrder = inOrder(config, idBuilder, issuer, producer, context, jobDataMap);
  // Stub the Quartz context: the job reads all of its inputs from the merged JobDataMap.
  when(context.getMergedJobDataMap()).thenReturn(jobDataMap);
  when(jobDataMap.getString(NotificationEventCreationJob.JOB_DATA_KEY_CONTACT_INSTANCE_ID))
      .thenReturn(EVENT_INSTANCE_ID);
  when(jobDataMap.getLong(NotificationEventCreationJob.JOB_DATA_KEY_EVENT_TIME)).thenReturn(NOW);
  when(jobDataMap.getString(NotificationEventCreationJob.JOB_DATA_KEY_EVENT_TYPE))
      .thenReturn(TYPE);
  when(jobDataMap.getString(NotificationEventCreationJob.JOB_DATA_KEY_GROUND_STATION_NAME))
      .thenReturn(GS);
  when(jobDataMap.getString(NotificationEventCreationJob.JOB_DATA_KEY_SATELLITE_NAME))
      .thenReturn(SAT);
  // Stub event-identity generation (namespace + type -> event id, plus issuer id).
  when(config.getEventNameSpace()).thenReturn(NAME_SPACE);
  when(idBuilder.buildID(NAME_SPACE, TYPE)).thenReturn(EVENT_ID);
  when(issuer.getID()).thenReturn(ISSUER);
}
/**
 * Though it says init() it does not instantiate objects every time the service is running. This
 * simply initialize the local variables with pre-instantiated objects.
 *
 * @param jobExecutionContext is the Quartz context
 * @return true if it is successfully executed.
 */
public boolean init(JobExecutionContext jobExecutionContext) {
  JobDataMap jdm = jobExecutionContext.getMergedJobDataMap();
  // Processor parameters travel as a nested map inside the job data map.
  Map<String, Object> parameters =
      (Map<String, Object>) jdm.get(MessageProcessorConstants.PARAMETERS);
  sender = (BlockingMsgSender) jdm.get(ScheduledMessageForwardingProcessor.BLOCKING_SENDER);
  // Max delivery attempts: invalid values are removed so the default stays in effect.
  String mdaParam = (String) parameters.get(MessageProcessorConstants.MAX_DELIVER_ATTEMPTS);
  if (mdaParam != null) {
    try {
      maxDeliverAttempts = Integer.parseInt(mdaParam);
    } catch (NumberFormatException nfe) {
      parameters.remove(MessageProcessorConstants.MAX_DELIVER_ATTEMPTS);
      log.error("Invalid value for max delivery attempts switching back to default value", nfe);
    }
  }
  if (jdm.get(ForwardingProcessorConstants.TARGET_ENDPOINT) != null) {
    targetEndpoint = (String) jdm.get(ForwardingProcessorConstants.TARGET_ENDPOINT);
  }
  // Retry interval: same invalid-value handling as max delivery attempts above.
  String ri = (String) parameters.get(MessageProcessorConstants.RETRY_INTERVAL);
  if (ri != null) {
    try {
      retryInterval = Integer.parseInt(ri);
    } catch (NumberFormatException nfe) {
      parameters.remove(MessageProcessorConstants.RETRY_INTERVAL);
      log.error("Invalid value for retry interval switching back to default value", nfe);
    }
  }
  messageProcessor = (MessageProcessor) jdm.get(MessageProcessorConstants.PROCESSOR_INSTANCE);
  messageConsumer = messageProcessor.getMessageConsumer();
  // Optional fault/reply sequences; fields keep their previous values when absent.
  if (parameters.get(ForwardingProcessorConstants.FAULT_SEQUENCE) != null) {
    faultSeq = (String) parameters.get(ForwardingProcessorConstants.FAULT_SEQUENCE);
  }
  if (parameters.get(ForwardingProcessorConstants.REPLY_SEQUENCE) != null) {
    replySeq = (String) parameters.get(ForwardingProcessorConstants.REPLY_SEQUENCE);
  }
  if (jdm.get(ForwardingProcessorConstants.NON_RETRY_STATUS_CODES) != null) {
    nonRetryStatusCodes = (String[]) jdm.get(ForwardingProcessorConstants.NON_RETRY_STATUS_CODES);
  }
  // Throttling: the interval is only consulted when throttling is enabled.
  if (jdm.get(ForwardingProcessorConstants.THROTTLE) != null) {
    isThrottling = (Boolean) jdm.get(ForwardingProcessorConstants.THROTTLE);
  }
  if (isThrottling) {
    if (jdm.get(ForwardingProcessorConstants.THROTTLE_INTERVAL) != null) {
      throttlingInterval = (Long) jdm.get(ForwardingProcessorConstants.THROTTLE_INTERVAL);
    }
  }
  // Configure property for the drop message after maximum delivery
  if (parameters.get(ForwardingProcessorConstants.MAX_DELIVERY_DROP) != null) {
    if ((parameters.get(ForwardingProcessorConstants.MAX_DELIVERY_DROP))
        .toString()
        .equals("Enabled")) {
      if (this.maxDeliverAttempts > 0) {
        isMaxDeliveryAttemptDropEnabled = true;
      }
    }
  }
  return true;
}
/**
 * Closes an auction: marks it closed in the database, records a Sell row (with buyer, final
 * price, and tax when a winner exists), and notifies seller and winner by e-mail.
 *
 * <p>NOTE(review): diagnostic output and user-facing mail text are in Italian; the println
 * strings are left untouched because they are runtime output.
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
  System.out.println("sono partito ");
  // retrieve servletContext
  ServletContext servletContext =
      (ServletContext) context.getMergedJobDataMap().get("servletContext");
  System.out.println("ho recuperato il context ");
  // Get bean from servlet context (same as application scope)
  DbmanagerBean dbmanager = (DbmanagerBean) servletContext.getAttribute("dbmanager");
  System.out.println("ho il dbmanager ");
  // Get bean from servlet context (same as application scope)
  MailerBean mailer = (MailerBean) servletContext.getAttribute("mailer");
  System.out.println("ho il mailerbean ");
  JobDataMap dataMap = context.getJobDetail().getJobDataMap();
  System.out.println("ho pure l'id dell'asta ");
  int auction_id = dataMap.getInt("auction_id");
  System.out.println("ovvero " + auction_id);
  Auction auction;
  Sell sell = new Sell();
  Double tax, price;
  try {
    System.out.println("mi chiamo " + dbmanager.findUsernameById(1));
    auction = dbmanager.findAuctionById(auction_id);
    System.out.println("Provo a chiudere l' asta " + auction.getDescription());
    dbmanager.closeAuction(auction_id);
    System.out.println("Ho chiuso asta " + auction.getDescription());
    // build a base sale record with default tax 1.23
    sell.setSeller_id(auction.getUser_id());
    sell.setAuction_id(auction_id);
    sell.setFinal_price(0);
    sell.setTax(1.23);
    System.out.println("Generato i dati base di un asta ");
    // if there is a winner
    if (auction.getWinner_id() != 0) {
      // take the final price
      price = auction.getActual_price();
      // compute the tax
      tax = roundToHalf((price / 100) * 1.25);
      sell.setFinal_price(price);
      sell.setTax(tax);
      sell.setBuyer_id(auction.getWinner_id());
    }
    // insert the sale into the db
    dbmanager.insertSell(sell);
    System.out.println("Inserito il sell correttamente ");
    User user = dbmanager.findUser(auction.getUser_id());
    User winner = dbmanager.findUser(auction.getWinner_id());
    try {
      // notify the seller
      mailer.SendMail(
          user.getMail(),
          "Asta " + auction.getId() + " conclusa",
          "L'asta " + auction.getDescription() + " si è conclusa.");
      if (winner != null) {
        // notify the winner, if one exists
        mailer.SendMail(
            winner.getMail(),
            "Asta " + auction.getId() + " conclusa",
            "L'asta " + auction.getDescription() + " si è conclusa, sei il vincitore!");
      }
    } catch (MessagingException ex) {
      Logger.getLogger(CloseAuctionJob.class.getName()).log(Level.SEVERE, null, ex);
    }
  } catch (SQLException ex) {
    System.out.println("non va dbmanager ");
    Logger.getLogger(CloseAuctionJob.class.getName()).log(Level.SEVERE, null, ex);
  }
}
/**
 * Quartz job that generates dataset extract files (tab-delimited, several ODM XML flavours,
 * SPSS) for a configured dataset and e-mails the requester links to each generated file.
 *
 * <p>Collaborators (DAOs, mail sender, file-generation service) are pulled from the Spring
 * ApplicationContext published in the scheduler context; job parameters come from the merged
 * JobDataMap. On success an audit row with file links is written; when no dataset id is
 * present a failure audit row is written instead. All failures are caught, logged, and
 * swallowed so the trigger keeps firing.
 */
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
  // need to generate a Locale so that user beans and other things will generate normally
  Locale locale = new Locale("en-US");
  ResourceBundleProvider.updateLocale(locale);
  ResourceBundle pageMessages = ResourceBundleProvider.getPageMessagesBundle();
  JobDataMap dataMap = context.getMergedJobDataMap();
  SimpleTrigger trigger = (SimpleTrigger) context.getTrigger();
  try {
    // Collaborators come from the Spring application context stored in the scheduler context.
    ApplicationContext appContext =
        (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
    String studySubjectNumber =
        ((CoreResources) appContext.getBean("coreResources")).getField("extract.number");
    coreResources = (CoreResources) appContext.getBean("coreResources");
    ruleSetRuleDao = (RuleSetRuleDao) appContext.getBean("ruleSetRuleDao");
    dataSource = (DataSource) appContext.getBean("dataSource");
    mailSender = (OpenClinicaMailSender) appContext.getBean("openClinicaMailSender");
    AuditEventDAO auditEventDAO = new AuditEventDAO(dataSource);
    /*
     * data map here should coincide with the job data map found in
     * CreateJobExportServlet, with the following code: jobDataMap = new
     * JobDataMap(); jobDataMap.put(DATASET_ID, datasetId);
     * jobDataMap.put(PERIOD, period); jobDataMap.put(EMAIL, email);
     * jobDataMap.put(TAB, tab); jobDataMap.put(CDISC, cdisc);
     * jobDataMap.put(SPSS, spss);
     */
    String alertEmail = dataMap.getString(EMAIL);
    // An explicit locale in the job data overrides the default en-US set above.
    String localeStr = dataMap.getString(LOCALE);
    if (localeStr != null) {
      locale = new Locale(localeStr);
      ResourceBundleProvider.updateLocale(locale);
      pageMessages = ResourceBundleProvider.getPageMessagesBundle();
    }
    int dsId = dataMap.getInt(DATASET_ID);
    // Each format flag is the string "1" when that extract format was requested.
    String tab = dataMap.getString(TAB);
    String cdisc = dataMap.getString(CDISC);
    String cdisc12 = dataMap.getString(CDISC12);
    if (cdisc12 == null) {
      cdisc12 = "0";
    }
    String cdisc13 = dataMap.getString(CDISC13);
    if (cdisc13 == null) {
      cdisc13 = "0";
    }
    String cdisc13oc = dataMap.getString(CDISC13OC);
    if (cdisc13oc == null) {
      cdisc13oc = "0";
    }
    String spss = dataMap.getString(SPSS);
    int userId = dataMap.getInt(USER_ID);
    int studyId = dataMap.getInt(STUDY_ID);
    // why the flip-flop? if one property is set to 'true' we can see jobs in another
    // screen but all properties have to be strings
    logger.debug("-- found the job: " + dsId + " dataset id");
    // NOTE(review): raw HashMap on the left-hand side; presumably maps file name -> file id
    // given the getFileNameStr/getFileIdInt accessors below — confirm.
    HashMap fileName = new HashMap<String, Integer>();
    if (dsId > 0) {
      // trying to not throw an error if there's no dataset id
      DatasetDAO dsdao = new DatasetDAO(dataSource);
      DatasetBean datasetBean = (DatasetBean) dsdao.findByPK(dsId);
      StudyDAO studyDao = new StudyDAO(dataSource);
      UserAccountDAO userAccountDAO = new UserAccountDAO(dataSource);
      String generalFileDir = "";
      String generalFileDirCopy = "";
      String exportFilePath = SQLInitServlet.getField("exportFilePath");
      // Timestamped per-run output directory under the dataset's directory.
      String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
      SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
      generalFileDir = DATASET_DIR + datasetBean.getId() + File.separator + sdfDir.format(new java.util.Date());
      if (!"".equals(exportFilePath)) {
        generalFileDirCopy = SQLInitServlet.getField("filePath") + exportFilePath + File.separator;
      }
      long sysTimeBegin = System.currentTimeMillis();
      // set up the user bean here, tbh
      userBean = (UserAccountBean) userAccountDAO.findByPK(userId);
      generateFileService = new GenerateExtractFileService(dataSource, userBean, coreResources, ruleSetRuleDao);
      // tbh #5796 - covers a bug when the user changes studies, 10/2010
      StudyBean activeStudy = (StudyBean) studyDao.findByPK(studyId);
      StudyBean parentStudy = new StudyBean();
      logger.debug("active study: " + studyId + " parent study: " + activeStudy.getParentStudyId());
      if (activeStudy.getParentStudyId() > 0) {
        parentStudy = (StudyBean) studyDao.findByPK(activeStudy.getParentStudyId());
      } else {
        parentStudy = activeStudy; // covers a bug in tab file creation, tbh 01/2009
      }
      logger.debug("-- found extract bean ");
      ExtractBean eb = generateFileService.generateExtractBean(datasetBean, activeStudy, parentStudy);
      MessageFormat mf = new MessageFormat("");
      StringBuffer message = new StringBuffer();
      StringBuffer auditMessage = new StringBuffer();
      // use resource bundle page messages to generate the email, tbh 02/2009
      message.append("<p>" + pageMessages.getString("email_header_1") + " " + EmailEngine.getAdminEmail() + " " + pageMessages.getString("email_header_2") + " Job Execution " + pageMessages.getString("email_header_3") + "</p>");
      message.append("<P>Dataset: " + datasetBean.getName() + "</P>");
      message.append("<P>Study: " + activeStudy.getName() + "</P>");
      message.append("<p>" + pageMessages.getString("html_email_body_1") + datasetBean.getName() + pageMessages.getString("html_email_body_2") + SQLInitServlet.getField("sysURL") + pageMessages.getString("html_email_body_3") + "</p>");
      // One block per requested format: generate the file, then append e-mail and audit links.
      if ("1".equals(tab)) {
        logger.debug("-- gen tab file 01");
        fileName = generateFileService.createTabFile(eb, sysTimeBegin, generalFileDir, datasetBean, activeStudy.getId(), parentStudy.getId(), generalFileDirCopy);
        message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
        auditMessage.append(pageMessages.getString("you_can_access_tab_delimited") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
      }
      if ("1".equals(cdisc)) {
        // ODM 1.2 with OpenClinica extensions
        String odmVersion = "oc1.2";
        fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null);
        logger.debug("-- gen odm file");
        message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
        auditMessage.append(pageMessages.getString("you_can_access_odm_12") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
      }
      if ("1".equals(cdisc12)) {
        // plain ODM 1.2
        String odmVersion = "1.2";
        fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null);
        logger.debug("-- gen odm file 1.2 default");
        message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
        auditMessage.append(pageMessages.getString("you_can_access_odm_12_xml") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
      }
      if ("1".equals(cdisc13)) {
        // plain ODM 1.3
        String odmVersion = "1.3";
        fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null);
        logger.debug("-- gen odm file 1.3");
        message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
        auditMessage.append(pageMessages.getString("you_can_access_odm_13") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
      }
      if ("1".equals(cdisc13oc)) {
        // ODM 1.3 with OpenClinica extensions
        String odmVersion = "oc1.3";
        fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null);
        logger.debug("-- gen odm file 1.3 oc");
        message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
        auditMessage.append(pageMessages.getString("you_can_access_odm_13_xml") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
      }
      if ("1".equals(spss)) {
        SPSSReportBean answer = new SPSSReportBean();
        fileName = generateFileService.createSPSSFile(datasetBean, eb, activeStudy, parentStudy, sysTimeBegin, generalFileDir, answer, generalFileDirCopy);
        logger.debug("-- gen spss file");
        message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
        auditMessage.append(pageMessages.getString("you_can_access_spss") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
      }
      // wrap up the message, and send the email
      message.append("<p>" + pageMessages.getString("html_email_body_5") + "</P><P>" + pageMessages.getString("email_footer"));
      try {
        mailSender.sendEmail(alertEmail.trim(), pageMessages.getString("job_ran_for") + " " + datasetBean.getName(), message.toString(), true);
      } catch (OpenClinicaSystemException ose) {
        // Do Nothing, In the future we might want to have an email
        // status added to system.
      }
      // Audit the successful run with the accumulated file links.
      TriggerBean triggerBean = new TriggerBean();
      triggerBean.setDataset(datasetBean);
      triggerBean.setUserAccount(userBean);
      triggerBean.setFullName(trigger.getName());
      auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, auditMessage.toString());
    } else {
      // No dataset id in the job data: record a failure audit row.
      TriggerBean triggerBean = new TriggerBean();
      triggerBean.setUserAccount(userBean);
      triggerBean.setFullName(trigger.getName());
      auditEventDAO.createRowForExtractDataJobFailure(triggerBean);
    }
  } catch (Exception e) {
    // TODO Auto-generated catch block -- ideally should generate a fail
    // msg here, tbh 02/2009
    logger.debug("-- found exception: " + e.getMessage());
    e.printStackTrace();
  }
}
/**
 * Batch job that synchronizes staged users (from a DB2 staging table) into the Keycloak
 * database: upserts user types/sub-types, user entities, and role mappings, then optionally
 * e-mails a summary report.
 *
 * <p>Behavior is driven by config flags read through {@code synDB.getConfigProperties}
 * (ENABLE_BATCH_JOB, ENABLE_KEYCLOAK_DATA, ENABLE_CLEAR_USER_TYPE, ENABLE_DB2_DATA,
 * ENABLE_EMAIL). NOTE(review): uses System.out rather than a logger throughout.
 */
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
  List<Object[]> lsSTGUsers = new ArrayList<Object[]>();
  synDB = (SynDB) context.getMergedJobDataMap().get("synDB");
  // Master switch: bail out entirely when the batch job is disabled.
  String eable = synDB.getConfigProperties("ENABLE_BATCH_JOB");
  if (eable.equalsIgnoreCase("N")) {
    return;
  }
  long startTime = System.currentTimeMillis();
  System.out.println("Batch Job running at " + Calendar.getInstance().getTime());
  // SMTP settings for the summary e-mail sent at the end of the run.
  host = synDB.getConfigProperties("SMTP_HOST");
  port = synDB.getConfigProperties("SMTP_PORT");
  password = synDB.getConfigProperties("SMTP_PASSWORD");
  emailFrom = synDB.getConfigProperties("FROM");
  emailTo = synDB.getConfigProperties("TO");
  subject = synDB.getConfigProperties("SUBJECT");
  username = synDB.getConfigProperties("SMTP_USER");
  logfilePath = synDB.getConfigProperties("LOG_FILE");
  /** Check disable sync to keycloak db */
  String eableOutgoing = synDB.getConfigProperties("ENABLE_KEYCLOAK_DATA");
  System.out.println("eableOutgoing: " + eableOutgoing);
  if (eableOutgoing.equalsIgnoreCase("N")) {
    return;
  }
  /** Clear user type, subtype */
  String clearUserType = synDB.getConfigProperties("ENABLE_CLEAR_USER_TYPE");
  if (clearUserType.equalsIgnoreCase("Y")) {
    synDB.clearUserTypeSubType();
    synDB.deleteSubType();
    synDB.deleteUserType();
  }
  /** ****************************** SYNC USERS * ***************************** */
  // Check disable get data from db2
  String eableIncoming = synDB.getConfigProperties("ENABLE_DB2_DATA");
  if (eableIncoming.equalsIgnoreCase("Y")) {
    lsSTGUsers = getCustomStgUsers(false);
    if (lsSTGUsers != null) {
      System.out.println("Sync to CUSTOM_STG_USER in keycloak");
      for (Object[] row : lsSTGUsers) {
        // NOTE(review): row[12] (CREATED_DATE) is appended twice below — confirm intended.
        System.out.println("INSERT TO KEYCLOAK | ID_NRIC:" + row[0] + ", FIRST_NAME:" + row[1] + ", LAST_NAME:" + row[2] + ", MOBILE:" + row[3] + ", EMAIL:" + row[4] + ", ACCOUNT_STATUS:" + row[5] + ", AGENT_CODE:" + row[6] + ", AGENCY:" + row[7] + ", NEED2FA:" + row[8] + ", NEEDTNC:" + row[9] + ", USER_TYPE:" + row[10] + ", USER_SUB_TYPE:" + row[11] + ", CREATED_DATE :" + row[12] + row[12] + ", ROLE_NAME :" + row[13]);
        String id_nric = (String) row[0];
        if (id_nric != null) {
          id_nric = id_nric.toUpperCase().trim();
        }
        // a.ID_NRIC, FIRST_NAME, LAST_NAME, MOBILE, EMAIL, ACCOUNT_STATUS, AGENT_CODE,
        // AGENCY, NEED2FA, NEEDTNC, USER_TYPE, USER__SUB_TYPE, a.CREATED_DATE, b.ROLE_NAME
        String first_name = (String) row[1];
        String last_name = (String) row[2];
        String mobile = (String) row[3];
        String email = (String) row[4];
        String account_status = (String) row[5];
        String agent_code = (String) row[6];
        String agency = (String) row[7];
        String need2fa = (String) row[8];
        String needtnc = (String) row[9];
        String user_type = (String) row[10];
        if (user_type != null) {
          user_type = user_type.toUpperCase().trim();
        }
        String user_sub_type = (String) row[11];
        if (user_sub_type != null) {
          user_sub_type = user_sub_type.toUpperCase().trim();
        }
        String roles = (String) row[13];
        System.out.println("roles: " + roles);
        // check for user type
        String userTypeId = synDB.getUserTypeId(user_type);
        if (userTypeId.trim().equals("")) {
          userTypeId = genearateUDID();
          synDB.insertCustomUserType(user_type, userTypeId);
        }
        // check for sub user type
        String userSubTypeId = synDB.getUserSubTypeId(user_sub_type);
        if (userSubTypeId.trim().equals("")) {
          userSubTypeId = genearateUDID();
          synDB.insertCustomUserSubType(user_sub_type, userSubTypeId, userTypeId);
        } else {
          synDB.updateCustomUserSubType(user_sub_type, userSubTypeId, userTypeId);
        }
        System.out.println("userSubTypeId: " + userSubTypeId);
        System.out.println("userTypeId: " + userTypeId);
        System.out.println("Check user exist in user entity table");
        // Upsert the user: insert when unknown, otherwise update in place.
        String user_id = synDB.getUserEntityByUsername(id_nric);
        if (user_id.trim().equals("")) {
          System.out.println("======================");
          System.out.println("Insert into UserEntity: " + id_nric);
          // Insert to User Entity table
          synDB.insertToUserEntity(
              id_nric,
              first_name,
              last_name,
              mobile,
              email,
              account_status,
              agent_code,
              agency,
              need2fa,
              needtnc,
              userTypeId,
              userSubTypeId);
        } else {
          synDB.updateUserEntityByID(
              first_name,
              last_name,
              mobile,
              email,
              account_status,
              agent_code,
              agency,
              userTypeId,
              userSubTypeId,
              user_id);
        }
        System.out.println("user_id=" + user_id);
        System.out.println("id_nric=" + id_nric);
        // manage app role
        List<String> lsUserTypeRoles = synDB.getRolesInUserType(user_type);
        // delete record by user_id and role_id in user_role_mapping
        synDB.deleteUserRoleMappingByUserIdRoleId(user_id);
        for (String rw : lsUserTypeRoles) {
          // add user_role_mapping by user_id and role_id (app role set in user type)
          String role_id = rw;
          System.out.println("========AND=====role_id: " + role_id);
          if (role_id != null && !role_id.trim().equals("")) {
            // add user_role_mapping by user_id and role_id (user role)
            synDB.insertUserRoleMapping(user_id, role_id);
          }
        }
        // manage user role
        StringTokenizer st = new StringTokenizer(roles, ",");
        while (st.hasMoreElements()) {
          String role = (String) st.nextElement();
          if (role != null) {
            role = role.trim().toUpperCase();
            // sync keycloak_role by role_name
            String keycloak_role_id = synDB.getKeycloakRoleByName(role);
            if (keycloak_role_id == null
                || (keycloak_role_id != null && keycloak_role_id.trim().equals(""))) {
              keycloak_role_id = genearateUDID();
              String realmId = synDB.getPSERealmId();
              synDB.insertKeycloakRole(
                  keycloak_role_id, realmId, false, role, realmId, null, realmId, false);
              countRoleProcessed += 1;
            }
            System.out.println("========AND=====keycloak_role_id: " + keycloak_role_id);
            // add user_role_mapping by user_id and role_id (user role)
            synDB.insertUserRoleMapping(user_id, keycloak_role_id);
          }
        }
      }
    }
    sNoOfRecordInputUser += noOfRecordInputUser + sNewLine;
  }
  countUserNotProcessed = noOfRecordInputUser - countUserProcessed;
  long endTime = System.currentTimeMillis();
  long totalTime = endTime - startTime;
  String sUserTypeNotExist = "";
  /* if (sbUserTypeNotInsert != null) { sUserTypeNotExist = "UserType not exist: " + sbUserTypeNotInsert.toString() + sNewLine; }*/
  // Build the summary report body from the accumulated counters.
  String content = sNoOfRecordInputUser + sNoOfRecordPressedUser + sNoOfRecordNotPressedUser;
  content += sNoOfRecordInputRole + sNoOfRecordPressedRole + sNoOfRecordNotPressedRole;
  content += sUserTypeNotExist;
  content += "Process duration: " + totalTime + "(millisecond)";
  String enableEmail = synDB.getConfigProperties("ENABLE_EMAIL");
  // Check disable send mail
  if (enableEmail.equalsIgnoreCase("Y")) {
    try {
      System.out.println("Sending email...");
      Mail mailBox =
          new Mail(emailFrom, emailTo, subject, host, port, username, password, content, logfilePath);
      mailBox.send();
      System.out.println("Mail sent");
    } catch (Exception ex) {
      ex.printStackTrace();
    }
  }
  System.out.println("totalTime: " + totalTime);
}
/**
 * Quartz job entry point: imports one or more CSV files into dotCMS contentlets.
 *
 * <p>Job data map keys read: {@code language} ({@code -1} enables multilanguage
 * import; an unparseable value falls back to the default language id),
 * {@code filePath} (a single CSV file or a directory scanned for {@code *.csv}),
 * {@code structure}, {@code fields} (comma-separated), {@code reportEmail},
 * {@code csvSeparatorDelimiter} (defaults to {@code ","}; only the first
 * non-blank character is used), {@code csvTextDelimiter} and
 * {@code publishContent}.
 *
 * <p>Each file is renamed to {@code <path>.lock} before processing so a
 * concurrent run will not pick it up; after the attempt the file is archived via
 * {@code moveImportedFile} and a result report is mailed via {@code sendResults}.
 *
 * @param context Quartz execution context carrying the merged job data map
 * @throws JobExecutionException declared by the {@code Job} interface; this
 *     implementation logs failures instead of rethrowing
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap properties = context.getMergedJobDataMap();

    // Resolve the import language; -1 means "multilanguage", and any parse
    // failure falls back to the system default language id.
    long language = 0;
    boolean isMultilanguage = false;
    try {
        language = Long.parseLong(properties.getString("language"));
        if (language == -1) {
            isMultilanguage = true;
        }
    } catch (Exception e4) {
        language = APILocator.getLanguageAPI().getDefaultLanguage().getId();
    }

    String filePath = null;
    String logPath = "";
    String fileName = "";
    try {
        filePath = (String) properties.get("filePath");
        logPath = pluginAPI.loadProperty("org.dotcms.plugins.contentImporter", "logFile");
        File file = new File(filePath);
        fileName = file.getName();
        // Strip the extension so reports are titled after the bare file name.
        int index = fileName.lastIndexOf(".");
        if (-1 < index) {
            fileName = fileName.substring(0, index);
        }
    } catch (Exception e) {
        // Was an empty catch; keep the best-effort behavior but leave a trace.
        Logger.warn(this, "Unable to resolve import file/log configuration: " + e.toString());
    }

    try {
        String structure = (String) properties.get("structure");

        // Optional explicit field list, comma-separated. (The original copied the
        // split array element-by-element through a List with no net effect.)
        String[] fields = {};
        if (UtilMethods.isSet(properties.get("fields"))) {
            fields = ((String) properties.get("fields")).split(",");
        }

        String reportEmail = (String) properties.get("reportEmail");

        String csvSeparatorDelimiter = (String) properties.get("csvSeparatorDelimiter");
        if (!UtilMethods.isSet(csvSeparatorDelimiter)) {
            csvSeparatorDelimiter = ",";
        } else {
            csvSeparatorDelimiter = "" + csvSeparatorDelimiter.trim().charAt(0);
        }

        String csvTextDelimiter = (String) properties.get("csvTextDelimiter");
        if (UtilMethods.isSet(csvTextDelimiter)) {
            csvTextDelimiter = "" + csvTextDelimiter.trim().charAt(0);
        }

        // Boolean.parseBoolean replaces the deprecated new Boolean(String)
        // constructor and treats null identically (false).
        boolean publishContent = Boolean.parseBoolean((String) properties.get("publishContent"));

        HashMap<String, List<String>> results = new HashMap<String, List<String>>();
        results.put("warnings", new ArrayList<String>());
        results.put("errors", new ArrayList<String>());
        results.put("messages", new ArrayList<String>());
        results.put("results", new ArrayList<String>());

        // Collect the files to import: the single file, or every *.csv in the directory.
        File tempfile = new File(filePath);
        List<File> filesList = new ArrayList<File>();
        if (!tempfile.exists()) {
            reportError(results, "File: " + filePath + " doesn't exist.", reportEmail, logPath, fileName);
        } else if (tempfile.isDirectory()) {
            File[] files = tempfile.listFiles();
            for (File f : files) {
                if (f.getName().toLowerCase().endsWith(".csv")) {
                    filesList.add(f);
                }
            }
        } else {
            filesList.add(tempfile);
        }
        // Deterministic processing order (File sorts lexicographically by path).
        Collections.sort(filesList);

        for (File file : filesList) {
            // Bug fix: the original reported the outer filePath (the directory)
            // here instead of the individual file that failed the check.
            if (!file.exists()) {
                reportError(results, "File: " + file.getPath() + " doesn't exist.", reportEmail, logPath, fileName);
            } else if (!file.isFile()) {
                reportError(results, file.getPath() + " isn't a file.", reportEmail, logPath, fileName);
            } else if (!file.canRead()) {
                reportError(results, "File: " + file.getPath() + " can't be read.", reportEmail, logPath, fileName);
            } else {
                Reader reader = null;
                CsvReader csvreader = null;
                try {
                    // A ".lock" rename acts as a cheap mutex against concurrent jobs.
                    File renameFile = new File(file.getPath() + ".lock");
                    if (!file.renameTo(renameFile)) {
                        // Previously the rename result was silently discarded; if the
                        // rename failed, the open below throws and is reported.
                        Logger.warn(this, "Could not rename " + file.getPath() + " to " + renameFile.getPath());
                    }
                    file = renameFile;
                    reader = new InputStreamReader(new FileInputStream(file), Charset.forName("UTF-8"));
                    csvreader = new CsvReader(reader, csvSeparatorDelimiter.charAt(0));
                    if (UtilMethods.isSet(csvTextDelimiter)) {
                        csvreader.setTextQualifier(csvTextDelimiter.charAt(0));
                    }
                    // Allow arbitrarily long fields; imported bodies can be large.
                    csvreader.setSafetySwitch(false);
                    User user = APILocator.getUserAPI().getSystemUser();
                    if (csvreader.readHeaders()) {
                        ContentletUtil contentletUtil = new ContentletUtil(reader, csvreader);
                        results = contentletUtil.importFile(
                            structure, fields, false, user, isMultilanguage, language, publishContent);
                    }
                } catch (Exception e) {
                    // results is already typed HashMap<String, List<String>>; the
                    // original's (List<String>) casts were redundant.
                    results.get("errors").add("Exception: " + e.toString());
                    Logger.error(ContentImporterThread.class, e.getMessage(), e);
                } finally {
                    if (reader != null) {
                        try {
                            reader.close();
                        } catch (Exception ignored) {
                            // best-effort close
                        }
                    }
                    if (csvreader != null) {
                        try {
                            csvreader.close();
                        } catch (Exception ignored) {
                            // best-effort close
                        }
                    }
                    // Archive the processed (.lock) file and mail the per-file report
                    // regardless of whether the import succeeded.
                    moveImportedFile(file.getPath());
                    sendResults(results, reportEmail, fileName + " Import Results", logPath, fileName);
                }
            }
        }
    } catch (Exception e1) {
        Logger.warn(this, e1.toString());
    }
}

/** Records an error message in the results map and mails the report immediately. */
private void reportError(HashMap<String, List<String>> results, String message,
    String reportEmail, String logPath, String fileName) {
    results.get("errors").add(message);
    sendResults(results, reportEmail, fileName + " Import Results", logPath, fileName);
}