/**
 * Ensures the region's user count is a plain number. If the stored value is an
 * expression rather than digits, it is evaluated against the job's timing
 * parameters and written back as a numeric string. Mutates and returns the
 * same {@code jobRegion} instance.
 *
 * @param jobRegion the region whose user field may hold an expression
 * @param job source of execution/simulation/ramp times for evaluation
 * @return the (possibly mutated) jobRegion
 */
private JobRegion fixJobRegion(JobRegion jobRegion, JobInstance job) {
  String userValue = jobRegion.getUsers();
  // Already a literal number — nothing to evaluate.
  if (NumberUtils.isDigits(userValue)) {
    return jobRegion;
  }
  long evaluatedUsers =
      TestParamUtil.evaluateExpression(
          userValue, job.getExecutionTime(), job.getSimulationTime(), job.getRampTime());
  jobRegion.setUsers(Long.toString(evaluatedUsers));
  return jobRegion;
}
private Set<? extends RegionRequest> getRegions(JobInstance job) { Set<JobRegion> ret = new HashSet<JobRegion>(); JobRegionDao dao = new JobRegionDao(); HashSet<VMRegion> regionSet = new HashSet<VMRegion>(); for (EntityVersion version : job.getJobRegionVersions()) { JobRegion jobRegion = null; if (version.getObjectId() > 0 && version.getVersionId() > 0) { try { jobRegion = dao.findRevision(version.getObjectId(), version.getVersionId()); } catch (Exception e) { LOG.error("Error getting region revision: " + e.toString(), e); } } if (jobRegion != null) { ret.add(fixJobRegion(jobRegion, job)); if (regionSet.contains(jobRegion.getRegion())) { LOG.warn("attempt to add multiple regions to job"); } else { regionSet.add(jobRegion.getRegion()); } } else { LOG.warn( "Attempt to add jobRegion version that does not exist. id = " + version.getObjectId() + " : version = " + version.getVersionId()); jobRegion = dao.findById(version.getObjectId()); if (jobRegion != null) { ret.add(fixJobRegion(jobRegion, job)); if (regionSet.contains(jobRegion.getRegion())) { LOG.warn("attempt to add multiple regions to job"); } else { regionSet.add(jobRegion.getRegion()); } } else { LOG.warn( "Cannot find job region with id " + version.getObjectId() + ". Returning current job Regions."); Workload workload = new WorkloadDao().findById(job.getWorkloadId()); ret = workload.getJobConfiguration().getJobRegions(); for (JobRegion region : ret) { fixJobRegion(region, job); } break; // throw new RuntimeException("Cannot find job region with id " + version.getObjectId()); } } } ret = JobRegionDao.cleanRegions(ret); return ret; }
/**
 * Builds report rows for the given jobs, labeling each with its project name.
 * Jobs whose workload/project no longer exists are labeled "N/A (Project Deleted)".
 *
 * @param all the job instances to report on
 * @return one JobReportData per job, in input order
 */
private List<JobReportData> getJobReportData(List<JobInstance> all) {
  // Collect the distinct workload ids so we hit the DAO once.
  Set<Integer> workloadIds = new HashSet<Integer>();
  for (JobInstance job : all) {
    workloadIds.add(job.getWorkloadId());
  }
  List<Workload> workloads;
  if (workloadIds.isEmpty()) {
    workloads = new ArrayList<Workload>();
  } else {
    workloads = new WorkloadDao().findForIds(new ArrayList<Integer>(workloadIds));
  }
  // Index project by workload id for the lookup below.
  Map<Integer, Project> projectByWorkloadId = new HashMap<Integer, Project>();
  for (Workload workload : workloads) {
    projectByWorkloadId.put(workload.getId(), workload.getProject());
  }
  List<JobReportData> result = new ArrayList<JobReportData>();
  for (JobInstance job : all) {
    Project project = projectByWorkloadId.get(job.getWorkloadId());
    String projectName;
    if (project != null) {
      projectName = project.getName();
    } else {
      projectName = "N/A (Project Deleted)";
    }
    result.add(new JobReportData(projectName, job));
  }
  return result;
}
/**
 * Reacts to job lifecycle events: installs CPU watches when an agent starts and removes them when
 * the job finishes, but only for jobs that have a notification configured for
 * {@link JobLifecycleEvent#AGENT_EXCESSIVE_CPU}.
 *
 * @param jobEvent the lifecycle event being observed
 */
public void observe(@Observes(notifyObserver = Reception.ALWAYS) JobEvent jobEvent) {
  if (jobEvent.getEvent() == JobLifecycleEvent.AGENT_STARTED) {
    JobInstance job = dao.findById(Integer.valueOf(jobEvent.getJobId()));
    JobNotification not = findExcessiveCpuNotification(job);
    if (not != null) {
      LOG.info("Adding watches for job " + job.getId());
      addWatches(job, not);
    }
  } else if (jobEvent.getEvent() == JobLifecycleEvent.JOB_FINISHED) {
    JobInstance job = dao.findById(Integer.valueOf(jobEvent.getJobId()));
    JobNotification not = findExcessiveCpuNotification(job);
    if (not != null) {
      // BUG FIX: this branch previously logged "Adding watches" while removing them.
      LOG.info("Removing watches for job " + job.getId());
      removeWatches(job, not);
    }
  }
}

/**
 * Returns the first notification on the job configured for AGENT_EXCESSIVE_CPU, or null when none
 * exists. Shared by both lifecycle branches above.
 */
private JobNotification findExcessiveCpuNotification(JobInstance job) {
  for (EntityVersion version : job.getNotificationVersions()) {
    JobNotification not = notificationDao.findById(version.getObjectId());
    if (not != null
        && not.getLifecycleEvents() != null
        && not.getLifecycleEvents().contains(JobLifecycleEvent.AGENT_EXCESSIVE_CPU)) {
      return not;
    }
  }
  return null;
}
/**
 * Resolves the ids of all data files attached to the job via its data-file
 * entity versions. Versions whose revision cannot be found are skipped with a
 * warning.
 *
 * @param job the job whose data-file versions are resolved
 * @return the set of existing data-file ids
 */
private Set<Integer> getDataFileIds(JobInstance job) {
  DataFileDao dataFileDao = new DataFileDao();
  Set<Integer> fileIds = new HashSet<Integer>();
  for (EntityVersion version : job.getDataFileVersions()) {
    DataFile dataFile = dataFileDao.findRevision(version.getObjectId(), version.getVersionId());
    if (dataFile == null) {
      LOG.warn("Attempt to add dataFile that does not exist.");
      continue;
    }
    fileIds.add(dataFile.getId());
  }
  return fileIds;
}
/**
 * Resolves the notifications attached to the job via its notification entity
 * versions. Versions whose revision cannot be found are skipped with a warning.
 *
 * @param job the job whose notification versions are resolved
 * @return the set of existing notifications
 */
private Set<? extends Notification> getNotifications(JobInstance job) {
  HashSet<JobNotification> notifications = new HashSet<JobNotification>();
  JobNotificationDao notificationDao = new JobNotificationDao();
  for (EntityVersion version : job.getNotificationVersions()) {
    JobNotification revision =
        notificationDao.findRevision(version.getObjectId(), version.getVersionId());
    if (revision == null) {
      LOG.warn("Attempt to add Notification that does not exist.");
      continue;
    }
    notifications.add(revision);
  }
  return notifications;
}
/**
 * Checks whether the given job has finished and, if so, whether the reporting instances can be
 * shut down. Marks the JobInstance as Completed if the cloud VM tracker says it ended but the DB
 * record has no end time yet.
 *
 * <p>NOTE(review): the {@code killModal}/{@code killNonRegional} flags are computed below but no
 * kill action is ever taken in this method — presumably the actual shutdown happens elsewhere or
 * was removed; confirm before relying on this method to stop instances.
 *
 * @param jobId the id of the job to check (numeric string for regular jobs)
 * @param mailService mail service passed in by the caller (unused in the visible body)
 */
private void checkJobStatus(String jobId, MailService mailService) {
  CloudVmStatusContainer container = tracker.getVmStatusForJob(jobId);
  LOG.info(
      "Checking Job Status to see if we can kill reporting instances. Container=" + container);
  if (container != null) {
    if (container.getEndTime() != null) {
      JobInstanceDao dao = new JobInstanceDao();
      // hack to see if this is an automation job
      // set the status of the JobInstance to finished.
      JobInstance finishedJob = dao.findById(Integer.valueOf(jobId));
      if (finishedJob.getEndTime() == null) {
        // Tracker says the job ended but the DB row was never closed out — close it now.
        finishedJob.setEndTime(new Date());
        finishedJob.setStatus(JobQueueStatus.Completed);
        dao.saveOrUpdate(finishedJob);
      }
      List<JobQueueStatus> statuses =
          Arrays.asList(new JobQueueStatus[] {JobQueueStatus.Running, JobQueueStatus.Starting});
      List<JobInstance> instances = dao.getForStatus(statuses);
      LOG.info(
          "Checking Job Status to see if we can kill reporting instances. found running instances: "
              + instances.size());
      boolean killModal = true;
      boolean killNonRegional = true;
      // NOTE(review): this loop only logs other unfinished jobs; it never clears the kill
      // flags — looks like incomplete logic, confirm intended behavior.
      for (JobInstance job : instances) {
        CloudVmStatusContainer statusForJob =
            tracker.getVmStatusForJob(Integer.toString(job.getId()));
        if (!jobId.equals(Integer.toString(job.getId()))
            && statusForJob != null
            && statusForJob.getEndTime() == null) {
          LOG.info("Found another job that is not finished: " + job);
        }
      }
      if (killNonRegional || killModal) {
        // A non-numeric job id in the tracker indicates an automation job; its presence
        // vetoes killing the reporting instances.
        for (CloudVmStatusContainer statusForJob : tracker.getAllJobs()) {
          if (statusForJob.getEndTime() == null
              && !NumberUtils.isNumber(statusForJob.getJobId())) {
            killNonRegional = false;
            killModal = false;
            LOG.info(
                "Cannot kill Reporting instances because of automation job id: "
                    + statusForJob.getJobId());
          }
        }
      }
    } else {
      LOG.info("Container does not have end time set so cannot kill reporting instaces.");
    }
  }
}
/**
 * Filters the given job list in place according to {@code jobReportOptions}: start/end time
 * window, min/max virtual users, and job-id range. Each criterion is applied only when its option
 * is set (and, for the numeric options, when the option value is all digits).
 *
 * <p>A job's effective start is its start time, falling back to its creation time.
 *
 * @param all the mutable list of jobs to filter (modified in place)
 */
private void filterDate(List<JobInstance> all) {
  Date date = jobReportOptions.getStartTime();
  if (date != null) {
    for (Iterator<JobInstance> iter = all.iterator(); iter.hasNext(); ) {
      JobInstance job = iter.next();
      Date st = job.getStartTime() != null ? job.getStartTime() : job.getCreated();
      if (!date.before(st)) {
        iter.remove();
      }
    }
  }
  date = jobReportOptions.getEndTime();
  if (date != null) {
    for (Iterator<JobInstance> iter = all.iterator(); iter.hasNext(); ) {
      JobInstance job = iter.next();
      Date st = job.getStartTime() != null ? job.getStartTime() : job.getCreated();
      if (!date.after(st)) {
        iter.remove();
      }
    }
  }
  if (NumberUtils.isDigits(jobReportOptions.getMinUsers())) {
    try {
      int users = Integer.parseInt(jobReportOptions.getMinUsers());
      for (Iterator<JobInstance> iter = all.iterator(); iter.hasNext(); ) {
        JobInstance job = iter.next();
        if (job.getTotalVirtualUsers() < users) {
          iter.remove();
        }
      }
    } catch (NumberFormatException e) {
      LOG.warn("Error with min users value of " + jobReportOptions.getMinUsers());
    }
  }
  if (NumberUtils.isDigits(jobReportOptions.getMaxUsers())) {
    try {
      int users = Integer.parseInt(jobReportOptions.getMaxUsers());
      for (Iterator<JobInstance> iter = all.iterator(); iter.hasNext(); ) {
        JobInstance job = iter.next();
        if (job.getTotalVirtualUsers() > users) {
          iter.remove();
        }
      }
    } catch (NumberFormatException e) {
      LOG.warn("Error with max users value of " + jobReportOptions.getMaxUsers());
    }
  }
  if (NumberUtils.isDigits(jobReportOptions.getJobIdStart())) {
    try {
      int jobIdStart = NumberUtils.toInt(jobReportOptions.getJobIdStart());
      for (Iterator<JobInstance> iter = all.iterator(); iter.hasNext(); ) {
        JobInstance job = iter.next();
        if (job.getId() < jobIdStart) {
          iter.remove();
        }
      }
    } catch (NumberFormatException e) {
      // BUG FIX: previously logged the max-users option here instead of the job-id start.
      LOG.warn("Error with job id start value of " + jobReportOptions.getJobIdStart());
    }
  }
  if (NumberUtils.isDigits(jobReportOptions.getJobIdEnd())) {
    try {
      // BUG FIX: local was misleadingly named jobIdStart in the end-range filter.
      int jobIdEnd = NumberUtils.toInt(jobReportOptions.getJobIdEnd());
      for (Iterator<JobInstance> iter = all.iterator(); iter.hasNext(); ) {
        JobInstance job = iter.next();
        if (job.getId() > jobIdEnd) {
          iter.remove();
        }
      }
    } catch (NumberFormatException e) {
      // BUG FIX: previously logged the max-users option here instead of the job-id end.
      LOG.warn("Error with job id end value of " + jobReportOptions.getJobIdEnd());
    }
  }
}
/**
 * Converts a persisted JobInstance into an immutable JobRequest, resolving its
 * regions, notifications, data files, and the script XML URL. When the
 * termination policy is script-driven, the simulation time is overridden to 0.
 *
 * @param job the persisted job to convert
 * @return the built job request
 */
private JobRequest jobToJobRequest(JobInstance job) {
  Builder builder = JobRequestImpl.builder();
  // Copy the scalar settings across from the persisted instance.
  builder.withBaselineVirtualUsers(job.getBaselineVirtualUsers());
  builder.withId(Integer.toString(job.getId()));
  builder.withIncrementStrategy(job.getIncrementStrategy());
  builder.withLocation(job.getLocation());
  builder.withRampTime(job.getRampTime());
  builder.withLoggingProfile(job.getLoggingProfile());
  builder.withStopBehavior(job.getStopBehavior());
  builder.withReportingMode(job.getReportingMode());
  builder.withUseEips(job.isUseEips());
  builder.withVmInstanceType(job.getVmInstanceType());
  builder.withnumUsersPerAgent(job.getNumUsersPerAgent());
  builder.withSimulationTime(job.getSimulationTime());
  builder.withStatus(job.getStatus());
  builder.withTerminationPolicy(job.getTerminationPolicy());
  builder.withUserIntervalIncrement(job.getUserIntervalIncrement());
  // Resolve the versioned associations.
  builder.withRegions(getRegions(job));
  builder.withNofitications(getNotifications(job));
  builder.withDataFileIds(getDataFileIds(job));
  // Script-terminated jobs run until the script stops them, not on a timer.
  if (job.getTerminationPolicy() == TerminationPolicy.script) {
    builder.withSimulationTime(0);
  }
  Workload workload = new WorkloadDao().findById(job.getWorkloadId());
  builder.withScriptXmlUrl(buildScriptXml(Integer.toString(job.getId()), workload));
  return builder.build();
}