@Test
public void shouldForceStagePlanWithModificationsSinceLast() throws Exception {
    Pipeline completedMingle = scheduleAndCompleteInitialPipelines();
    pipelineDao.loadPipeline(completedMingle.getId());

    TestingMaterial testingMaterial = new TestingMaterial();
    mingleConfig.setMaterialConfigs(new MaterialConfigs(testingMaterial.config()));

    MaterialRevisions revisions = new MaterialRevisions();
    revisions.addRevision(testingMaterial, testingMaterial.modificationsSince(null, null, subprocessExecutionContext));
    BuildCause buildCause = BuildCause.createManualForced(revisions, Username.ANONYMOUS);
    dbHelper.saveMaterials(buildCause.getMaterialRevisions());

    Pipeline forcedPipeline = instanceFactory.createPipelineInstance(mingleConfig, buildCause,
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    pipelineService.save(forcedPipeline);

    verifyMingleScheduledWithModifications();
}
@Test
public void shouldLockPipelineWhenSchedulingIt() throws Exception {
    scheduleAndCompleteInitialPipelines();
    configHelper.lockPipeline("mingle");
    Material stubMaterial = new TestingMaterial();
    mingleConfig.setMaterialConfigs(new MaterialConfigs(stubMaterial.config()));

    assertThat(pipelineLockService.isLocked("mingle"), is(false));

    MaterialRevisions revisions = new MaterialRevisions();
    revisions.addRevision(stubMaterial, ((TestingMaterial) stubMaterial).modificationsSince(null, null, subprocessExecutionContext));
    BuildCause buildCause = BuildCause.createWithModifications(revisions, "");
    dbHelper.saveMaterials(buildCause.getMaterialRevisions());
    Pipeline pipeline = instanceFactory.createPipelineInstance(mingleConfig, buildCause,
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    pipelineService.save(pipeline);

    assertThat(pipelineLockService.isLocked("mingle"), is(true));
}
public Stage scheduleStage(String pipelineName, String stageName, SchedulingContext context) {
    PipelineConfig pipelineConfig = getCurrentConfig().pipelineConfigByName(new CaseInsensitiveString(pipelineName));
    return instanceFactory.createStageInstance(pipelineConfig, new CaseInsensitiveString(stageName), context,
            getCurrentConfig().getMd5(), clock);
}
@Test
public void shouldScheduleJobForAllAgentsWhenToBeRunOnAllAgents() throws Exception {
    configHelper.addAgent("localhost", "uuid1");
    configHelper.addAgent("localhost", "uuid2");
    configHelper.addAgent("localhost", "uuid3");
    // uuid1 is placed in the "dev" environment, so it is not eligible for this pipeline
    configHelper.addAgentToEnvironment("dev", "uuid1");
    configHelper.setRunOnAllAgents(CaseInsensitiveString.str(evolveConfig.name()), STAGE_NAME, "unit", true);

    Material stubMaterial = new TestingMaterial();
    evolveConfig.setMaterialConfigs(new MaterialConfigs(stubMaterial.config()));
    MaterialRevisions revisions = new MaterialRevisions();
    revisions.addRevision(stubMaterial, ((TestingMaterial) stubMaterial).modificationsSince(null, null, subprocessExecutionContext));

    BuildCause buildCause = BuildCause.createWithModifications(revisions, "");
    dbHelper.saveMaterials(buildCause.getMaterialRevisions());
    Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, buildCause,
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY, environmentConfigService.agentsForPipeline(evolveConfig.name())),
            md5, new TimeProvider());
    pipelineService.save(pipeline);

    Stage instance = scheduleService.scheduleStage(pipeline, STAGE_NAME, "anyone",
            new ScheduleService.NewStageInstanceCreator(goConfigService), new ScheduleService.ExceptioningErrorHandler());
    JobInstances scheduledJobs = instance.getJobInstances();

    assertThat(scheduledJobs.toArray(), hasItemInArray(hasProperty("name", is(RunOnAllAgents.CounterBasedJobNameGenerator.appendMarker("unit", 1)))));
    assertThat(scheduledJobs.toArray(), hasItemInArray(hasProperty("agentUuid", is("uuid2"))));
    assertThat(scheduledJobs.toArray(), hasItemInArray(hasProperty("name", is(RunOnAllAgents.CounterBasedJobNameGenerator.appendMarker("unit", 2)))));
    assertThat(scheduledJobs.toArray(), hasItemInArray(hasProperty("agentUuid", is("uuid3"))));
    assertThat(scheduledJobs.size(), is(2));
}
@Test
public void shouldRemoveAllJobPlansThatAreNotInConfig() {
    CruiseConfig oldConfig = goConfigService.getCurrentConfig();
    ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("p1", "s1", u.m(new HgMaterial("hg", null)));
    Pipeline p1_1 = instanceFactory.createPipelineInstance(p1.config, modifyNoFiles(p1.config),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("p2", "s1", u.m(new HgMaterial("hg", null)));
    Pipeline p2_1 = instanceFactory.createPipelineInstance(p2.config, modifyNoFiles(p2.config),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(p1_1);
    dbHelper.savePipelineWithStagesAndMaterials(p2_1);
    CruiseConfig cruiseConfig = goConfigService.getCurrentConfig();
    buildAssignmentService.onConfigChange(cruiseConfig);
    buildAssignmentService.onTimer();

    List<JobPlan> plans = (List<JobPlan>) ReflectionUtil.getField(buildAssignmentService, "jobPlans");
    assertThat(plans.isEmpty(), is(false));
    assertThat(plans.size(), is(2));

    configHelper.writeConfigFile(oldConfig);
    plans = (List<JobPlan>) ReflectionUtil.getField(buildAssignmentService, "jobPlans");
    assertThat("Actual size is " + plans.size(), plans.isEmpty(), is(true));
}
private long rerunJob(String jobName, PipelineConfig pipelineConfig, Pipeline previousSuccessfulBuildWithOlderPackageConfig) {
    Stage stage = instanceFactory.createStageForRerunOfJobs(previousSuccessfulBuildWithOlderPackageConfig.getFirstStage(),
            asList(jobName), new DefaultSchedulingContext(), pipelineConfig.getFirstStageConfig(),
            new TimeProvider(), configHelper.getGoConfigDao().md5OfConfigFile());
    stage = stageService.save(previousSuccessfulBuildWithOlderPackageConfig, stage);
    return stage.getFirstJob().getId();
}
@Test
public void shouldNotAssignCancelledJob() throws Exception {
    AgentIdentifier instance = agent(AgentMother.localAgent());
    Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, modifyNoFiles(evolveConfig),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
    JobInstance job = buildOf(pipeline);
    job.cancel();
    jobInstanceDao.updateStateAndResult(job);

    assertThat(buildAssignmentService.assignWorkToAgent(instance), is((Work) BuildAssignmentService.NO_WORK));
}
@Test
public void shouldCreateWorkWithFetchMaterialsFlagFromStageConfig() throws Exception {
    evolveConfig.getFirstStageConfig().setFetchMaterials(true);
    Pipeline pipeline1 = instanceFactory.createPipelineInstance(evolveConfig, modifySomeFiles(evolveConfig),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline1);

    buildAssignmentService.onTimer();
    BuildWork work = (BuildWork) buildAssignmentService.assignWorkToAgent(agent(AgentMother.localAgent()));

    assertThat("should have set fetchMaterials on assignment", work.getAssignment().getPlan().shouldFetchMaterials(), is(true));
}
@Test
public void shouldNotAssignWorkWhenPipelineScheduledWithStaleMaterials() {
    AgentIdentifier instance = agent(AgentMother.localAgent());
    Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, modifyNoFiles(evolveConfig),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline);

    // replace the pipeline's material config after scheduling, so the already-scheduled revisions become stale
    evolveConfig.setMaterialConfigs(new MaterialConfigs(new HgMaterialConfig("foo", null)));
    configHelper.removePipeline(CaseInsensitiveString.str(evolveConfig.name()));
    configHelper.addPipeline(evolveConfig);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
    JobInstance job = buildOf(pipeline);
    jobInstanceDao.updateStateAndResult(job);

    assertThat(buildAssignmentService.assignWorkToAgent(instance), is((Work) BuildAssignmentService.NO_WORK));
}
@Test
public void shouldRescheduleAbandonedBuild() throws SQLException {
    AgentIdentifier instance = agent(AgentMother.localAgent());
    Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, modifyNoFiles(evolveConfig),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
    buildAssignmentService.onTimer();
    buildAssignmentService.assignWorkToAgent(instance);
    long firstAssignedBuildId = buildOf(pipeline).getId();

    // somehow agent abandoned its original build...
    buildAssignmentService.assignWorkToAgent(instance);

    JobInstance reloaded = jobInstanceDao.buildByIdWithTransitions(firstAssignedBuildId);
    assertThat(reloaded.getState(), is(JobState.Rescheduled));
    assertThat(reloaded.isIgnored(), is(true));
}
@Test
public void shouldBeAbleToSerializeAndDeserializeBuildWork() throws Exception {
    Pipeline pipeline1 = instanceFactory.createPipelineInstance(evolveConfig, modifySomeFiles(evolveConfig),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline1);

    buildAssignmentService.onTimer();
    BuildWork work = (BuildWork) buildAssignmentService.assignWorkToAgent(agent(AgentMother.localAgent()));

    BuildWork deserialized = (BuildWork) SerializationTester.serializeAndDeserialize(work);

    assertThat(deserialized.getAssignment().materialRevisions(), is(work.getAssignment().materialRevisions()));
    assertThat(deserialized.getAssignment(), is(work.getAssignment()));
    assertThat(deserialized, is(work));
}
/**
 * Pipeline and material graph used by this test
 * (---> :: material dependency, .... :: fetch artifact call):
 *
 *   uppest [uppest-stage, uppest-stage-2, uppest-stage-3]
 *     uppest-stage   ---> upper-peer ---> downer                  (uppest/2/uppest-stage/1)
 *     uppest-stage-2 ---> upper      ---> downer ---> downest     (uppest/1/uppest-stage-2/1)
 *
 *   downest fetches foo.zip via the ancestor path uppest/upper/downer,
 *   and bar.zip via the ancestor path uppest/upper-peer/downer; each call
 *   should resolve to the uppest-stage run of the relevant uppest instance.
 */
@Test
public void shouldCreateWork_withAncestorFetchArtifactCalls_resolvedToRelevantStage() throws Exception {
    configHelper.addPipeline("uppest", "uppest-stage");
    configHelper.addStageToPipeline("uppest", "uppest-stage-2");
    PipelineConfig uppest = configHelper.addStageToPipeline("uppest", "uppest-stage-3");

    configHelper.addPipeline("upper", "upper-stage");
    DependencyMaterial upper_sMaterial = new DependencyMaterial(new CaseInsensitiveString("uppest"), new CaseInsensitiveString("uppest-stage-2"));
    PipelineConfig upper = configHelper.setMaterialConfigForPipeline("upper", upper_sMaterial.config());

    configHelper.addPipeline("upper-peer", "upper-peer-stage");
    DependencyMaterial upperPeer_sMaterial = new DependencyMaterial(new CaseInsensitiveString("uppest"), new CaseInsensitiveString("uppest-stage"));
    PipelineConfig upperPeer = configHelper.setMaterialConfigForPipeline("upper-peer", upperPeer_sMaterial.config());

    configHelper.addPipeline("downer", "downer-stage");
    DependencyMaterial downer_sUpperMaterial = new DependencyMaterial(new CaseInsensitiveString("upper"), new CaseInsensitiveString("upper-stage"));
    configHelper.setMaterialConfigForPipeline("downer", downer_sUpperMaterial.config());
    DependencyMaterial downer_sUpperPeerMaterial = new DependencyMaterial(new CaseInsensitiveString("upper-peer"), new CaseInsensitiveString("upper-peer-stage"));
    PipelineConfig downer = configHelper.addMaterialToPipeline("downer", downer_sUpperPeerMaterial.config());

    configHelper.addPipeline("downest", "downest-stage");
    DependencyMaterial downest_sMaterial = new DependencyMaterial(new CaseInsensitiveString("downer"), new CaseInsensitiveString("downer-stage"));
    configHelper.setMaterialConfigForPipeline("downest", downest_sMaterial.config());

    Tasks allFetchTasks = new Tasks();
    allFetchTasks.add(new FetchTask(new CaseInsensitiveString("uppest/upper/downer"), new CaseInsensitiveString("uppest-stage"),
            new CaseInsensitiveString("unit"), "foo.zip", "bar"));
    allFetchTasks.add(new FetchTask(new CaseInsensitiveString("uppest/upper-peer/downer"), new CaseInsensitiveString("uppest-stage"),
            new CaseInsensitiveString("unit"), "bar.zip", "baz"));
    configHelper.replaceAllJobsInStage("downest", "downest-stage",
            new JobConfig(new CaseInsensitiveString("fetcher"), new Resources("fetcher"), new ArtifactPlans(), allFetchTasks));
    PipelineConfig downest = goConfigService.getCurrentConfig().pipelineConfigByName(new CaseInsensitiveString("downest"));

    DefaultSchedulingContext defaultSchedulingCtx = new DefaultSchedulingContext(DEFAULT_APPROVED_BY);
    Pipeline uppestInstanceForUpper = instanceFactory.createPipelineInstance(uppest, modifySomeFiles(uppest), defaultSchedulingCtx, md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(uppestInstanceForUpper);
    dbHelper.passStage(uppestInstanceForUpper.findStage("uppest-stage"));
    Stage upper_sMaterialStage = dbHelper.scheduleStage(uppestInstanceForUpper, uppest.getStage(new CaseInsensitiveString("uppest-stage-2")));
    dbHelper.passStage(upper_sMaterialStage);

    Pipeline uppestInstanceForUpperPeer = instanceFactory.createPipelineInstance(uppest, modifySomeFiles(uppest),
            new DefaultSchedulingContext("super-hero"), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(uppestInstanceForUpperPeer);
    Stage upperPeer_sMaterialStage = uppestInstanceForUpperPeer.findStage("uppest-stage");
    dbHelper.passStage(upperPeer_sMaterialStage);

    Pipeline upperInstance = instanceFactory.createPipelineInstance(upper, buildCauseForDependency(upper_sMaterial, upper_sMaterialStage),
            defaultSchedulingCtx, md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(upperInstance);
    Stage downer_sUpperMaterialStage = upperInstance.findStage("upper-stage");
    dbHelper.passStage(downer_sUpperMaterialStage);

    Pipeline upperPeerInstance = instanceFactory.createPipelineInstance(upperPeer, buildCauseForDependency(upperPeer_sMaterial, upperPeer_sMaterialStage),
            defaultSchedulingCtx, md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(upperPeerInstance);
    Stage downer_sUpperPeerMaterialStage = upperPeerInstance.findStage("upper-peer-stage");
    dbHelper.passStage(downer_sUpperPeerMaterialStage);

    MaterialRevisions downer_sMaterialRevisions = new MaterialRevisions(
            materialRevisionForDownstream(downer_sUpperMaterial, downer_sUpperMaterialStage),
            materialRevisionForDownstream(downer_sUpperPeerMaterial, downer_sUpperPeerMaterialStage));

    Pipeline downerInstance = instanceFactory.createPipelineInstance(downer, BuildCause.createManualForced(downer_sMaterialRevisions, loserUser),
            defaultSchedulingCtx, md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(downerInstance);
    Stage downest_sMaterialStage = downerInstance.findStage("downer-stage");
    dbHelper.passStage(downest_sMaterialStage);

    Pipeline downestInstance = instanceFactory.createPipelineInstance(downest, buildCauseForDependency(downest_sMaterial, downest_sMaterialStage),
            defaultSchedulingCtx, md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(downestInstance);

    buildAssignmentService.onTimer();

    AgentConfig agentConfig = AgentMother.localAgent();
    agentConfig.addResource(new Resource("fetcher"));
    BuildWork work = (BuildWork) buildAssignmentService.assignWorkToAgent(agent(agentConfig));

    List<Builder> builders = work.getAssignment().getBuilders();
    FetchArtifactBuilder fooZipFetch = (FetchArtifactBuilder) builders.get(0);
    assertThat(fooZipFetch.artifactLocator(), is("uppest/1/uppest-stage/latest/unit/foo.zip"));
    FetchArtifactBuilder barZipFetch = (FetchArtifactBuilder) builders.get(1);
    assertThat(barZipFetch.artifactLocator(), is("uppest/2/uppest-stage/1/unit/bar.zip"));
}