  @Test
  public void shouldCancelBuildBelongingToNonExistentPipelineWhenCreatingWork() throws Exception {
    fixture.createPipelineWithFirstStageScheduled();
    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);

    ScheduledPipelineLoader scheduledPipelineLoader = mock(ScheduledPipelineLoader.class);
    when(scheduledPipelineLoader.pipelineWithPasswordAwareBuildCauseByBuildId(
            pipeline.getFirstStage().getJobInstances().first().getId()))
        .thenThrow(new PipelineNotFoundException("thrown by mockPipelineService"));

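    // Remove the pipeline from the config the service will see, so the scheduled job now
    // belongs to a pipeline that no longer exists.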
    GoConfigService mockGoConfigService = mock(GoConfigService.class);
    CruiseConfig config = configHelper.currentConfig();
    configHelper.removePipeline(fixture.pipelineName, config);
    when(mockGoConfigService.getCurrentConfig()).thenReturn(config);

    buildAssignmentService =
        new BuildAssignmentService(
            mockGoConfigService,
            jobInstanceService,
            scheduleService,
            agentService,
            environmentConfigService,
            timeProvider,
            transactionTemplate,
            scheduledPipelineLoader,
            pipelineService,
            builderFactory,
            agentRemoteHandler);
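    // onTimer loads the currently scheduled job plans into the assignment service.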
    buildAssignmentService.onTimer();

    AgentConfig agentConfig = AgentMother.localAgent();
    agentConfig.addResource(new Resource("some-other-resource"));

    try {
      buildAssignmentService.assignWorkToAgent(agent(agentConfig));
      fail("should have thrown PipelineNotFoundException");
    } catch (PipelineNotFoundException e) {
      // ok
    }

    pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);

    JobInstance job = pipeline.getFirstStage().getJobInstances().first();
    assertThat(job.getState(), is(JobState.Completed));
    assertThat(job.getResult(), is(JobResult.Cancelled));
    Stage stage = stageDao.findStageWithIdentifier(job.getIdentifier().getStageIdentifier());
    assertThat(stage.getState(), is(StageState.Cancelled));
    assertThat(stage.getResult(), is(StageResult.Cancelled));
  }
  @Test
  public void shouldReturnTheRevisionsThatMatchTheGivenSearchString() {
    when(securityService.hasViewPermissionForPipeline("pavan", "pipeline")).thenReturn(true);
    LocalizedOperationResult operationResult = mock(LocalizedOperationResult.class);
    MaterialConfig materialConfig = mock(MaterialConfig.class);
    when(goConfigService.materialForPipelineWithFingerprint("pipeline", "sha"))
        .thenReturn(materialConfig);

    List<MatchedRevision> expected =
        Arrays.asList(
            new MatchedRevision(
                "23",
                "revision",
                "revision",
                "user",
                new DateTime(2009, 10, 10, 12, 0, 0, 0).toDate(),
                "comment"));
    when(materialRepository.findRevisionsMatching(materialConfig, "23")).thenReturn(expected);
    assertThat(
        materialService.searchRevisions(
            "pipeline",
            "sha",
            "23",
            new Username(new CaseInsensitiveString("pavan")),
            operationResult),
        is(expected));
  }
  @Test
  public void shouldCancelBuildsForDeletedJobsWhenPipelineConfigChanges() throws Exception {
    fixture = new PipelineWithTwoStages(materialRepository, transactionTemplate).usingTwoJobs();
    fixture.usingConfigHelper(configHelper).usingDbHelper(dbHelper).onSetUp();
    fixture.createPipelineWithFirstStageScheduled();

    buildAssignmentService.onTimer();
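    // Delete one of the two jobs in the dev stage; the config change below should cancel its
    // scheduled instance while leaving the remaining job untouched.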
    configHelper.removeJob(fixture.pipelineName, fixture.devStage, fixture.JOB_FOR_DEV_STAGE);

    buildAssignmentService.onPipelineConfigChange(
        goConfigService
            .getCurrentConfig()
            .getPipelineConfigByName(new CaseInsensitiveString(fixture.pipelineName)),
        "g1");

    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
    JobInstance deletedJob =
        pipeline.getFirstStage().getJobInstances().getByName(fixture.JOB_FOR_DEV_STAGE);
    assertThat(deletedJob.getState(), is(JobState.Completed));
    assertThat(deletedJob.getResult(), is(JobResult.Cancelled));
    JobInstance retainedJob =
        pipeline.getFirstStage().getJobInstances().getByName(fixture.DEV_STAGE_SECOND_JOB);
    assertThat(retainedJob.getState(), is(JobState.Scheduled));
    assertThat(retainedJob.getResult(), is(JobResult.Unknown));
  }
  @After
  public void teardown() throws Exception {
    goCache.clear();
    agentService.clearAll();
    fixture.onTearDown();
    dbHelper.onTearDown();
    configHelper.onTearDown();
    FileUtil.deleteFolder(goConfigService.artifactsDir());
    agentAssignment.clear();
    agentRemoteHandler.connectedAgents().clear();
  }
  @Test
  public void shouldCancelOutOfDateBuilds() throws Exception {
    fixture.createPipelineWithFirstStageScheduled();
    buildAssignmentService.onTimer();
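    // Removing the scheduled stage from the config makes its job out of date.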
    configHelper.removeStage(fixture.pipelineName, fixture.devStage);

    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());

    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
    JobInstance job = pipeline.getFirstStage().getJobInstances().first();
    assertThat(job.getState(), is(JobState.Completed));
    assertThat(job.getResult(), is(JobResult.Cancelled));
  }
  @Test
  public void shouldRemoveAllJobPlansThatAreNotInConfig() {
    CruiseConfig oldConfig = goConfigService.getCurrentConfig();
    ScheduleTestUtil.AddedPipeline p1 =
        u.saveConfigWith("p1", "s1", u.m(new HgMaterial("hg", null)));
    Pipeline p1_1 =
        instanceFactory.createPipelineInstance(
            p1.config,
            modifyNoFiles(p1.config),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY),
            md5,
            new TimeProvider());
    ScheduleTestUtil.AddedPipeline p2 =
        u.saveConfigWith("p2", "s1", u.m(new HgMaterial("hg", null)));
    Pipeline p2_1 =
        instanceFactory.createPipelineInstance(
            p2.config,
            modifyNoFiles(p2.config),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY),
            md5,
            new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(p1_1);
    dbHelper.savePipelineWithStagesAndMaterials(p2_1);
    CruiseConfig cruiseConfig = goConfigService.getCurrentConfig();
    buildAssignmentService.onConfigChange(cruiseConfig);
    buildAssignmentService.onTimer();

    List<JobPlan> plans =
        (List<JobPlan>) ReflectionUtil.getField(buildAssignmentService, "jobPlans");
    assertThat(plans.isEmpty(), is(false));
    assertThat(plans.size(), is(2));

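    // Revert to the original config: p1 and p2 are no longer defined, so their job plans
    // should be removed.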
    configHelper.writeConfigFile(oldConfig);
    plans = (List<JobPlan>) ReflectionUtil.getField(buildAssignmentService, "jobPlans");
    assertThat("Actual size is " + plans.size(), plans.isEmpty(), is(true));
  }
  @Test
  public void shouldReturnNotFoundIfTheMaterialDoesNotBelongToTheGivenPipeline() {
    when(securityService.hasViewPermissionForPipeline("pavan", "pipeline")).thenReturn(true);
    LocalizedOperationResult operationResult = mock(LocalizedOperationResult.class);

    when(goConfigService.materialForPipelineWithFingerprint("pipeline", "sha"))
        .thenThrow(new RuntimeException("Not found"));

    materialService.searchRevisions(
        "pipeline", "sha", "23", new Username(new CaseInsensitiveString("pavan")), operationResult);
    verify(operationResult)
        .notFound(
            LocalizedMessage.materialWithFingerPrintNotFound("pipeline", "sha"),
            HealthStateType.general(HealthStateScope.forPipeline("pipeline")));
  }
  @Before
  public void setUp() throws Exception {
    configHelper = new GoConfigFileHelper().usingCruiseConfigDao(goConfigDao);
    configHelper.onSetUp();

    dbHelper.onSetUp();
    fixture = new PipelineWithTwoStages(materialRepository, transactionTemplate);
    fixture.usingConfigHelper(configHelper).usingDbHelper(dbHelper).onSetUp();

    repository = new SvnCommand(null, testRepo.projectRepositoryUrl());
    evolveConfig = configHelper.addPipeline("evolve", STAGE_NAME, repository, "unit");
    configHelper.addPipeline("anotherPipeline", STAGE_NAME, repository, "anotherTest");
    configHelper.addPipeline("thirdPipeline", STAGE_NAME, repository, "yetAnotherTest");
    goConfigService.forceNotifyListeners();
    goCache.clear();
    u = new ScheduleTestUtil(transactionTemplate, materialRepository, dbHelper, configHelper);

    agent = new AgentStub();
  }
  @Test
  public void shouldNotAssignCancelledJob() throws Exception {
    AgentIdentifier instance = agent(AgentMother.localAgent());
    Pipeline pipeline =
        instanceFactory.createPipelineInstance(
            evolveConfig,
            modifyNoFiles(evolveConfig),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY),
            md5,
            new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
    JobInstance job = buildOf(pipeline);
    job.cancel();
    jobInstanceDao.updateStateAndResult(job);

    assertThat(
        buildAssignmentService.assignWorkToAgent(instance),
        is((Work) BuildAssignmentService.NO_WORK));
  }
  @Test
  public void shouldNotAssignWorkWhenPipelineScheduledWithStaleMaterials() {
    AgentIdentifier instance = agent(AgentMother.localAgent());
    Pipeline pipeline =
        instanceFactory.createPipelineInstance(
            evolveConfig,
            modifyNoFiles(evolveConfig),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY),
            md5,
            new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline);
    evolveConfig.setMaterialConfigs(new MaterialConfigs(new HgMaterialConfig("foo", null)));
    configHelper.removePipeline(CaseInsensitiveString.str(evolveConfig.name()));
    configHelper.addPipeline(evolveConfig);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
    JobInstance job = buildOf(pipeline);
    jobInstanceDao.updateStateAndResult(job);
    assertThat(
        buildAssignmentService.assignWorkToAgent(instance),
        is((Work) BuildAssignmentService.NO_WORK));
  }
  @Test
  public void shouldRescheduleAbandonedBuild() throws SQLException {
    AgentIdentifier instance = agent(AgentMother.localAgent());
    Pipeline pipeline =
        instanceFactory.createPipelineInstance(
            evolveConfig,
            modifyNoFiles(evolveConfig),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY),
            md5,
            new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
    buildAssignmentService.onTimer();
    buildAssignmentService.assignWorkToAgent(instance);
    long firstAssignedBuildId = buildOf(pipeline).getId();

    // somehow agent abandoned its original build...

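    // Asking for work again should reschedule the abandoned build and mark the original ignored.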
    buildAssignmentService.assignWorkToAgent(instance);
    JobInstance reloaded = jobInstanceDao.buildByIdWithTransitions(firstAssignedBuildId);
    assertThat(reloaded.getState(), is(JobState.Rescheduled));
    assertThat(reloaded.isIgnored(), is(true));
  }
  /**
   * Dependency graph exercised by this test:
   *
   * <pre>
   * uppest [stages: uppest-stage, uppest-stage-2, uppest-stage-3]
   *   uppest-stage-2 (uppest/1/uppest-stage-2/1) ---> upper      ---> downer ---> downest
   *   uppest-stage   (uppest/2/uppest-stage/1)   ---> upper-peer ---> downer
   * </pre>
   *
   * <p>downest fetches foo.zip through the ancestor path uppest/upper/downer and bar.zip through
   * uppest/upper-peer/downer; both fetch calls refer to the stage "uppest-stage".
   *
   * <p>---> :: material dependency
   */
  @Test
  public void shouldCreateWork_withAncestorFetchArtifactCalls_resolvedToRelevantStage()
      throws Exception {
    configHelper.addPipeline("uppest", "uppest-stage");
    configHelper.addStageToPipeline("uppest", "uppest-stage-2");
    PipelineConfig uppest = configHelper.addStageToPipeline("uppest", "uppest-stage-3");

    configHelper.addPipeline("upper", "upper-stage");
    DependencyMaterial upper_sMaterial =
        new DependencyMaterial(
            new CaseInsensitiveString("uppest"), new CaseInsensitiveString("uppest-stage-2"));
    PipelineConfig upper =
        configHelper.setMaterialConfigForPipeline("upper", upper_sMaterial.config());

    configHelper.addPipeline("upper-peer", "upper-peer-stage");
    DependencyMaterial upperPeer_sMaterial =
        new DependencyMaterial(
            new CaseInsensitiveString("uppest"), new CaseInsensitiveString("uppest-stage"));
    PipelineConfig upperPeer =
        configHelper.setMaterialConfigForPipeline("upper-peer", upperPeer_sMaterial.config());

    configHelper.addPipeline("downer", "downer-stage");
    DependencyMaterial downer_sUpperMaterial =
        new DependencyMaterial(
            new CaseInsensitiveString("upper"), new CaseInsensitiveString("upper-stage"));
    configHelper.setMaterialConfigForPipeline("downer", downer_sUpperMaterial.config());
    DependencyMaterial downer_sUpperPeerMaterial =
        new DependencyMaterial(
            new CaseInsensitiveString("upper-peer"), new CaseInsensitiveString("upper-peer-stage"));
    PipelineConfig downer =
        configHelper.addMaterialToPipeline("downer", downer_sUpperPeerMaterial.config());

    configHelper.addPipeline("downest", "downest-stage");
    DependencyMaterial downest_sMaterial =
        new DependencyMaterial(
            new CaseInsensitiveString("downer"), new CaseInsensitiveString("downer-stage"));
    configHelper.setMaterialConfigForPipeline("downest", downest_sMaterial.config());
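    // downest's job fetches artifacts from the ancestor pipeline "uppest" through two different
    // parent paths: via upper (foo.zip) and via upper-peer (bar.zip).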
    Tasks allFetchTasks = new Tasks();
    allFetchTasks.add(
        new FetchTask(
            new CaseInsensitiveString("uppest/upper/downer"),
            new CaseInsensitiveString("uppest-stage"),
            new CaseInsensitiveString("unit"),
            "foo.zip",
            "bar"));
    allFetchTasks.add(
        new FetchTask(
            new CaseInsensitiveString("uppest/upper-peer/downer"),
            new CaseInsensitiveString("uppest-stage"),
            new CaseInsensitiveString("unit"),
            "bar.zip",
            "baz"));
    configHelper.replaceAllJobsInStage(
        "downest",
        "downest-stage",
        new JobConfig(
            new CaseInsensitiveString("fetcher"),
            new Resources("fetcher"),
            new ArtifactPlans(),
            allFetchTasks));
    PipelineConfig downest =
        goConfigService
            .getCurrentConfig()
            .pipelineConfigByName(new CaseInsensitiveString("downest"));

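    // Create the pipeline instances in dependency order: two runs of "uppest" (the first feeding
    // "upper" through uppest-stage-2, the second feeding "upper-peer" through uppest-stage),
    // then "upper", "upper-peer", "downer", and finally "downest".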
    DefaultSchedulingContext defaultSchedulingCtx =
        new DefaultSchedulingContext(DEFAULT_APPROVED_BY);
    Pipeline uppestInstanceForUpper =
        instanceFactory.createPipelineInstance(
            uppest, modifySomeFiles(uppest), defaultSchedulingCtx, md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(uppestInstanceForUpper);
    dbHelper.passStage(uppestInstanceForUpper.findStage("uppest-stage"));
    Stage upper_sMaterialStage =
        dbHelper.scheduleStage(
            uppestInstanceForUpper, uppest.getStage(new CaseInsensitiveString("uppest-stage-2")));
    dbHelper.passStage(upper_sMaterialStage);

    Pipeline uppestInstanceForUpperPeer =
        instanceFactory.createPipelineInstance(
            uppest,
            modifySomeFiles(uppest),
            new DefaultSchedulingContext("super-hero"),
            md5,
            new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(uppestInstanceForUpperPeer);
    Stage upperPeer_sMaterialStage = uppestInstanceForUpperPeer.findStage("uppest-stage");
    dbHelper.passStage(upperPeer_sMaterialStage);

    Pipeline upperInstance =
        instanceFactory.createPipelineInstance(
            upper,
            buildCauseForDependency(upper_sMaterial, upper_sMaterialStage),
            defaultSchedulingCtx,
            md5,
            new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(upperInstance);
    Stage downer_sUpperMaterialStage = upperInstance.findStage("upper-stage");
    dbHelper.passStage(downer_sUpperMaterialStage);

    Pipeline upperPeerInstance =
        instanceFactory.createPipelineInstance(
            upperPeer,
            buildCauseForDependency(upperPeer_sMaterial, upperPeer_sMaterialStage),
            defaultSchedulingCtx,
            md5,
            new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(upperPeerInstance);
    Stage downer_sUpperPeerMaterialStage = upperPeerInstance.findStage("upper-peer-stage");
    dbHelper.passStage(downer_sUpperPeerMaterialStage);

    MaterialRevisions downer_sMaterialRevisions =
        new MaterialRevisions(
            materialRevisionForDownstream(downer_sUpperMaterial, downer_sUpperMaterialStage),
            materialRevisionForDownstream(
                downer_sUpperPeerMaterial, downer_sUpperPeerMaterialStage));

    Pipeline downerInstance =
        instanceFactory.createPipelineInstance(
            downer,
            BuildCause.createManualForced(downer_sMaterialRevisions, loserUser),
            defaultSchedulingCtx,
            md5,
            new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(downerInstance);
    Stage downest_sMaterialStage = downerInstance.findStage("downer-stage");
    dbHelper.passStage(downest_sMaterialStage);

    Pipeline downestInstance =
        instanceFactory.createPipelineInstance(
            downest,
            buildCauseForDependency(downest_sMaterial, downest_sMaterialStage),
            defaultSchedulingCtx,
            md5,
            new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(downestInstance);

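    // An agent carrying the "fetcher" resource should be assigned the "downest" job, with each
    // ancestor fetch call resolved to the correct run of "uppest".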
    buildAssignmentService.onTimer();
    AgentConfig agentConfig = AgentMother.localAgent();
    agentConfig.addResource(new Resource("fetcher"));
    BuildWork work = (BuildWork) buildAssignmentService.assignWorkToAgent(agent(agentConfig));

    List<Builder> builders = work.getAssignment().getBuilders();
    FetchArtifactBuilder fooZipFetch = (FetchArtifactBuilder) builders.get(0);
    assertThat(fooZipFetch.artifactLocator(), is("uppest/1/uppest-stage/latest/unit/foo.zip"));
    FetchArtifactBuilder barZipFetch = (FetchArtifactBuilder) builders.get(1);
    assertThat(barZipFetch.artifactLocator(), is("uppest/2/uppest-stage/1/unit/bar.zip"));
  }