@Test public void testScheduleDag() { HashSet<VM> vms = new HashSet<VM>(); for (int i = 0; i < 10; i++) { VMStaticParams vmStaticParams = VMStaticParams.getDefaults(); VM vm = new VM(vmStaticParams, cloudsim); vms.add(vm); cloudsim.send(engine.getId(), cloud.getId(), 0.0, WorkflowEvent.VM_LAUNCH, vm); } DAG dag = new DAG(); for (int i = 0; i < 100; i++) { Task task = new Task("TASK" + i, "transformation", (i % 10), VMType.DEFAULT_VM_TYPE); dag.addTask(task); } List<DAG> dags = new ArrayList<DAG>(); dags.add(dag); // FIXME (_mequrel): looks awkward, a comment should be added or some logic inversed new EnsembleManager(dags, engine, cloudsim); cloudsim.startSimulation(); assertEquals(vms.size(), engine.getAvailableVMs().size()); assertEquals(0, engine.getQueuedJobs().size()); jobLog.printJobs("testEnsembleDynamicSchedulerDag"); }
@Test public void testScheduleDag100() { HashSet<VM> vms = new HashSet<VM>(); for (int i = 0; i < 10; i++) { VMStaticParams vmStaticParams = VMStaticParams.getDefaults(); VM vm = new VM(vmStaticParams, cloudsim); vms.add(vm); cloudsim.send(engine.getId(), cloud.getId(), 0.0, WorkflowEvent.VM_LAUNCH, vm); } DAG dag = DAGParser.parseDAG(new File("dags/CyberShake_100.dag")); List<DAG> dags = new ArrayList<DAG>(); dags.add(dag); // FIXME (_mequrel): looks awkward, a comment should be added or some logic inversed new EnsembleManager(dags, engine, cloudsim); cloudsim.startSimulation(); assertEquals(vms.size(), engine.getAvailableVMs().size()); assertEquals(0, engine.getQueuedJobs().size()); jobLog.printJobs("testEnsembleDynamicSchedulerDag_CyberShake_100"); }
/**
 * Completes the given task: records the chosen transition as the task's
 * outcome property, persists the updated instance, and then asks the workflow
 * component to drive the task to its end state.
 *
 * @param taskId         identifier of the task instance to complete
 * @param transitionName the transition taken out of the task
 */
@Override
public void endTask(String taskId, String transitionName) {
    WorkflowTaskInstance instance = workflowComponent.getTaskInstanceById(taskId);
    instance.getProperties().put(WorkflowConstants.VAR_OUTCOME, transitionName);
    workflowComponent.updateTaskInstance(instance);
    workflowComponent.processTaskToEnd(taskId, transitionName);
}
/**
 * Returns the task instances attached to the active workflow instance with
 * the given identifier. Delegates directly to the workflow component.
 */
@Override
public List<WorkflowTaskInstance> getTaskInstancesByWorkflowInstanceId(
        String workflowInstanceId) {
    return workflowComponent.getTasksByActiveWorkflowId(workflowInstanceId);
}
/**
 * Launches ten VMs with zero provisioning/deprovisioning delay and verifies
 * that the engine reports all of them as available once the simulation ends.
 */
@Test
public void testScheduleVMS() {
    Set<VM> launchedVms = new HashSet<VM>();
    for (int i = 0; i < 10; i++) {
        VM vm = new VM(VMStaticParams.getDefaults(), cloudsim);
        vm.setProvisioningDelay(0.0);
        vm.setDeprovisioningDelay(0.0);
        launchedVms.add(vm);
        cloudsim.send(engine.getId(), cloud.getId(), 0.1, WorkflowEvent.VM_LAUNCH, vm);
    }

    cloudsim.startSimulation();

    assertEquals(launchedVms.size(), engine.getAvailableVMs().size());
}
/**
 * Looks up a task instance by id.
 *
 * @param id         the task instance identifier
 * @param fetch_type {@code FORUPDATE} returns the live (mutable) instance;
 *                   any other value returns a read-only info wrapper
 * @return the task instance, possibly wrapped in a {@link WorkflowTaskInstanceInfo}
 */
@Override
public WorkflowTaskInstance getTaskInstanceByTaskId(String id, FETCH_TYPE fetch_type) {
    WorkflowTaskInstance taskInstance = workflowComponent.getTaskInstanceById(id);
    // BUGFIX: compare enum constants with '==' instead of equals() — identical
    // semantics for enums, but null-safe (equals() would NPE on a null fetch_type).
    if (fetch_type == FETCH_TYPE.FORUPDATE) {
        return taskInstance;
    }
    return new WorkflowTaskInstanceInfo(taskInstance);
}
public StartWorkflowResponse startWorkflow(StartWorkflowRequest request) { String workflowClassId = workflowClassDao.findByName(request.getWorkflowClass()).get_id(); Workflow wf = workflowEngine.createNewWorkflow(workflowClassId, request.getAttributes()); Event e = new Event(); e.setEventType(WorkflowEventType.INITIATE_WORKFLOW); e.setWorkflowId(wf.get_id()); e.setAttributes(request.getAttributes()); // todo should the attributes be in event as well? workflowEngine.submitEvent(e); log.info("New workflow initiated: " + wf); StartWorkflowResponse response = new StartWorkflowResponse(); response.setWorkflowId(wf.get_id()); response.setStatus(EndpointResponseStatus.OK); return response; }
/**
 * Looks up a workflow instance by id.
 *
 * @param workflowinstanceId the workflow instance identifier
 * @param fetch_type         {@code FORUPDATE} returns the live (mutable)
 *                           instance; any other value returns a read-only
 *                           info wrapper
 * @return the workflow instance, possibly wrapped in a {@link WorkflowInstanceInfo}
 */
@Override
public WorkflowInstance getWorkflowInstanceById(
        String workflowinstanceId, FetchModel.FETCH_TYPE fetch_type) {
    WorkflowInstance workflowInstance =
        workflowComponent.getWorkflowInstanceById(workflowinstanceId);
    // BUGFIX: compare enum constants with '==' instead of equals() — identical
    // semantics for enums, but null-safe (equals() would NPE on a null fetch_type).
    if (fetch_type == FetchModel.FETCH_TYPE.FORUPDATE) {
        return workflowInstance;
    }
    return new WorkflowInstanceInfo(workflowInstance);
}
/**
 * Submits the event carried by the request to the workflow engine.
 *
 * @return an OK response on success; an error response wrapping the thrown
 *         exception on any failure (this is a service boundary, so the broad
 *         catch is deliberate)
 */
public EndpointResponse submitEvent(SubmitEventRequest request) {
    try {
        workflowEngine.submitEvent(request.getEvent());
        return EndpointHelper.createResponse(EndpointResponse.class, request);
    } catch (Exception ex) {
        return EndpointHelper.createErrorResponse(EndpointResponse.class, request, ex);
    }
}
/**
 * Transforms the given template into a deployable workflow definition and
 * deploys it through the workflow component.
 *
 * @throws WorkflowException wrapping any failure raised during deployment
 */
@Override
public void deployWorkflow(WorkflowTemplate workflowTemplate) {
    Serializable workflowDefinition =
        (Serializable) this.workflowTransformService.fromObjectToWorkflowDefinition(workflowTemplate);
    try {
        workflowComponent.deployWorkflow(workflowTemplate, workflowDefinition);
    } catch (Exception cause) {
        throw new WorkflowException(cause);
    }
}
@Before public void setUp() { // TODO(_mequrel_): change to IoC in the future or to mock cloudsim = new CloudSimWrapper(); cloudsim.init(); storageManager = new VoidStorageManager(cloudsim); provisioner = null; scheduler = new EnsembleDynamicScheduler(cloudsim); engine = new WorkflowEngine(new SimpleJobFactory(1000), provisioner, scheduler, cloudsim); cloud = new Cloud(cloudsim); jobLog = new WorkflowLog(cloudsim); engine.addJobListener(jobLog); }
@Override public void stopWorkflow(String processInstanceId) { workflowComponent.stopWorkflow(processInstanceId); // Create the workflow history event WorkflowHistoryDAO workflowHistoryDao = (WorkflowHistoryDAO) Context.getInstance().getBean(WorkflowHistoryDAO.class); WorkflowHistory transaction = new WorkflowHistory(); WorkflowInstance instance = getWorkflowInstanceById(processInstanceId, FETCH_TYPE.INFO); WorkflowPersistenceTemplateDAO workflowTemplateDao = (WorkflowPersistenceTemplateDAO) Context.getInstance().getBean(WorkflowPersistenceTemplateDAO.class); WorkflowPersistenceTemplate template = workflowTemplateDao.findByName(instance.getName()); transaction.setTemplateId(template.getId()); transaction.setInstanceId(processInstanceId); transaction.setDate(new Date()); transaction.setSessionId(SessionManagement.getCurrentUserSessionId()); transaction.setEvent(WorkflowHistory.EVENT_WORKFLOW_END); transaction.setComment(""); transaction.setUser(SessionManagement.getUser()); workflowHistoryDao.store(transaction); }
/**
 * Persists changes made to the given workflow instance. Pure delegation to
 * the workflow component.
 */
@Override
public void updateWorkflow(WorkflowInstance workflowInstance) {
    workflowComponent.updateWorkflowInstance(workflowInstance);
}
/**
 * Returns every process definition known to the workflow component.
 */
@Override
public List<WorkflowDefinition> getAllDefinitions() {
    return workflowComponent.getAllProcessDefinitions();
}
/**
 * Returns every task instance known to the workflow component.
 */
@Override
public List<WorkflowTaskInstance> getAllTaskInstances() {
    return workflowComponent.getAllTaskInstances();
}
public WorkflowInstance startWorkflow( WorkflowDefinition workflowDefinition, Map<String, Serializable> properties) { WorkflowPersistenceTemplateDAO workflowTemplateDao = (WorkflowPersistenceTemplateDAO) Context.getInstance().getBean(WorkflowPersistenceTemplateDAO.class); try { WorkflowInstance workflowInstance = null; if (workflowDefinition != null && !workflowDefinition.getDefinitionId().isEmpty()) { log.info("workflowComponent: " + workflowComponent); log.info("workflowDefinition: " + workflowDefinition); log.info("workflowDefinition.getDefinitionId()" + workflowDefinition.getDefinitionId()); log.info("properties size: " + properties.size()); workflowInstance = workflowComponent.startWorkflow(workflowDefinition.getDefinitionId(), properties); // Create the workflow history event WorkflowHistoryDAO workflowHistoryDao = (WorkflowHistoryDAO) Context.getInstance().getBean(WorkflowHistoryDAO.class); WorkflowHistory transaction = new WorkflowHistory(); WorkflowPersistenceTemplate template = workflowTemplateDao.findByName(workflowInstance.getName()); transaction.setTemplateId(template.getId()); transaction.setTemplateId(template.getId()); transaction.setInstanceId(workflowInstance.getId()); transaction.setDate(new Date()); transaction.setSessionId(SessionManagement.getCurrentUserSessionId()); transaction.setEvent(WorkflowHistory.EVENT_WORKFLOW_START); transaction.setComment(""); transaction.setUser(SessionManagement.getUser()); workflowHistoryDao.store(transaction); // Create a workflow history for each document associated to // this // workflow instance Set<Long> docIds = (Set<Long>) workflowInstance.getProperties().get(WorkflowConstants.VAR_DOCUMENTS); for (Long docId : docIds) { // Create the workflow history event WorkflowHistory docAppended = new WorkflowHistory(); docAppended.setTemplateId(template.getId()); docAppended.setInstanceId(workflowInstance.getId()); docAppended.setDate(new Date()); docAppended.setSessionId(SessionManagement.getCurrentUserSessionId()); 
docAppended.setEvent(WorkflowHistory.EVENT_WORKFLOW_DOCAPPENDED); docAppended.setDocId(docId); docAppended.setComment(""); docAppended.setUser(SessionManagement.getUser()); workflowHistoryDao.store(docAppended); } } else { Messages.addLocalizedWarn("noselection"); } return workflowInstance; } finally { workflowTemplateDao.fixConversionField(); } }
/**
 * Undeploys the workflow process with the given id. Pure delegation to the
 * workflow component.
 */
@Override
public void undeployWorkflow(String processId) {
    workflowComponent.undeployWorkflow(processId);
}
/**
 * Signals the given workflow instance to continue. Pure delegation to the
 * workflow component.
 */
public void signal(String workflowInstanceId) {
    workflowComponent.signal(workflowInstanceId);
}
public void submitDAG(DAGJob dagJob) { // Submit the dag to the workflow engine sendNow(engine.getId(), WorkflowEvent.DAG_SUBMIT, dagJob); }
/**
 * Persists changes made to the given task instance. Pure delegation to the
 * workflow component.
 */
@Override
public void updateWorkflow(WorkflowTaskInstance taskInstance) {
    workflowComponent.updateTaskInstance(taskInstance);
}
public static void main(String[] args) { try { Log.disable(); // First step: Initialize the WorkflowSim package. /** Should change this based on real physical path */ String daxPath = "E:\\PhD\\ComplexCloudSim\\config\\dax\\Montage_1000.xml"; File daxFile = new File(daxPath); if (!daxFile.exists()) { Log.printLine( "Warning: Please replace daxPath with the physical path in your working environment!"); return; } Parameters.PlanningAlgorithm pln_method = Parameters.PlanningAlgorithm.INVALID; ReplicaCatalog.FileSystem file_system = ReplicaCatalog.FileSystem.SHARED; OverheadParameters op = new OverheadParameters(0, null, null, null, null, 0); ClusteringParameters.ClusteringMethod method = ClusteringParameters.ClusteringMethod.NONE; ClusteringParameters cp = new ClusteringParameters(0, 0, method, null); // For each scheduling algorithm (FCFS,RR,MinMin,MaxMin), run 100 times for (int sche = 0; sche < 4; sche++) { Parameters.SchedulingAlgorithm sch_method; switch (sche) { case 0: sch_method = Parameters.SchedulingAlgorithm.FCFS; break; case 1: sch_method = Parameters.SchedulingAlgorithm.ROUNDROBIN; break; case 2: sch_method = Parameters.SchedulingAlgorithm.MINMIN; break; case 3: sch_method = Parameters.SchedulingAlgorithm.MAXMIN; break; default: sch_method = Parameters.SchedulingAlgorithm.FCFS; } for (int runs = 0; runs < numRuns; runs++) { Parameters.init(numVMs, daxPath, null, null, op, cp, sch_method, pln_method, null, 0); ReplicaCatalog.init(file_system); // before creating any entities. int num_user = 1; // number of grid users Calendar calendar = Calendar.getInstance(); boolean trace_flag = false; // mean trace events // Initialize the CloudSim library CloudSim.init(num_user, calendar, trace_flag); ComplexDatacenter datacenter0 = createDatacenter("Datacenter_0"); /** Create a WorkflowPlanner with one schedulers. */ WorkflowPlanner wfPlanner = new WorkflowPlanner("planner_0", 1); /** Create a WorkflowEngine. 
*/ WorkflowEngine wfEngine = wfPlanner.getWorkflowEngine(); /** * Create a list of VMs.The userId of a vm is basically the id of the scheduler that * controls this vm. */ List<ComplexVM> vmlist0 = createVM(wfEngine.getSchedulerId(0)); /** Submits this list of vms to this WorkflowEngine. */ wfEngine.submitVmList(vmlist0, 0); /** Binds the data centers with the scheduler. */ wfEngine.bindSchedulerDatacenter(datacenter0.getId(), 0); CloudSim.startSimulation(); List<Job> outputList0 = wfEngine.getJobsReceivedList(); CloudSim.stopSimulation(); switch (sche) { case 0: FCFSResult[runs] = wfEngine.getWorkflowFinishTime(); break; case 1: RoundRobinResult[runs] = wfEngine.getWorkflowFinishTime(); break; case 2: MinMinResult[runs] = wfEngine.getWorkflowFinishTime(); break; case 3: MaxMinResult[runs] = wfEngine.getWorkflowFinishTime(); break; default: FCFSResult[runs] = wfEngine.getWorkflowFinishTime(); break; } } Log.enable(); Log.printLine( "------ " + numVMs + " VMs " + numRuns + " Runs with Damage Ratio " + damageRatio + "------"); Log.printLine(">> FCFS"); printResult(FCFSResult); Log.printLine(">> RoundRobin"); printResult(RoundRobinResult); Log.printLine(">> MinMin"); printResult(MinMinResult); Log.printLine(">> MaxMin"); printResult(MaxMinResult); } } catch (Exception e) { Log.printLine("The simulation has been terminated due to an unexpected error"); } }