/**
 * Takes the next pending HA work item from the database and processes it.
 *
 * Flow: attempt to claim a work item for this server via {@code _haDao.take(_serverId)};
 * if none is available, sleep up to {@code _timeToSleep} ms and return. Otherwise dispatch
 * on the work type (Migration / HA-restart / Stop variants / Destroy). A {@code null}
 * result from the handler means the work completed; a non-null result is the next time
 * to retry, and the item is released back to the pool (server id and taken-date cleared)
 * so any server can pick it up again.
 *
 * Never throws: any {@link Throwable} is logged so the worker loop keeps running.
 */
private void runWithContext() {
    HaWorkVO work = null;
    try {
        s_logger.trace("Checking the database");
        work = _haDao.take(_serverId);
        if (work == null) {
            try {
                // Nothing to do: park this worker until new work may be available.
                synchronized (this) {
                    wait(_timeToSleep);
                }
                return;
            } catch (final InterruptedException e) {
                s_logger.info("Interrupted");
                // Restore the interrupt status so the owning thread/executor can
                // observe the interruption and shut down cleanly; swallowing it
                // here would silently defeat cancellation.
                Thread.currentThread().interrupt();
                return;
            }
        }
        // Tag all log lines for this item with its work id for traceability.
        NDC.push("work-" + work.getId());
        s_logger.info("Processing " + work);
        try {
            final WorkType wt = work.getWorkType();
            Long nextTime = null;
            if (wt == WorkType.Migration) {
                nextTime = migrate(work);
            } else if (wt == WorkType.HA) {
                nextTime = restart(work);
            } else if (wt == WorkType.Stop || wt == WorkType.CheckStop || wt == WorkType.ForceStop) {
                nextTime = stopVM(work);
            } else if (wt == WorkType.Destroy) {
                nextTime = destroyVM(work);
            } else {
                assert false : "How did we get here with " + wt.toString();
                return;
            }

            if (nextTime == null) {
                // Handler finished the work item.
                s_logger.info("Completed " + work);
                work.setStep(Step.Done);
            } else {
                // nextTime is stored right-shifted by 10 (~seconds); shift back
                // to approximate milliseconds for the human-readable log date.
                s_logger.info("Rescheduling " + work + " to try again at " + new Date(nextTime << 10));
                work.setTimeToTry(nextTime);
                // Release the item so any management server may retry it.
                work.setServerId(null);
                work.setDateTaken(null);
            }
        } catch (Exception e) {
            // Handler blew up: mark the item as errored so it is not retried forever.
            s_logger.error("Terminating " + work, e);
            work.setStep(Step.Error);
        }
        _haDao.update(work.getId(), work);
    } catch (final Throwable th) {
        // Worker loop must survive anything; log and move on.
        s_logger.error("Caught this throwable, ", th);
    } finally {
        if (work != null) {
            NDC.pop();
        }
    }
}
/**
 * Resolves the {@link WorkCategory} whose sub-types include the given work type.
 *
 * Categories are checked in a fixed order: publication, conference,
 * intellectual property, then other output.
 *
 * @param type the work type to classify
 * @return the first category whose sub-types contain {@code type}
 * @throws IllegalArgumentException if no category claims the given type
 */
public static WorkCategory fromWorkType(WorkType type) {
    final WorkCategory[] candidates = {
        PUBLICATION, CONFERENCE, INTELLECTUAL_PROPERTY, OTHER_OUTPUT
    };
    for (WorkCategory category : candidates) {
        if (category.getSubTypes().contains(type)) {
            return category;
        }
    }
    throw new IllegalArgumentException("Invalid work type provided: " + type.name());
}