  @Test
  public void shouldLockPipelineWhenSchedulingIt() throws Exception {
    scheduleAndCompleteInitialPipelines();

    configHelper.lockPipeline("mingle");

    Material stubMaterial = new TestingMaterial();
    mingleConfig.setMaterialConfigs(new MaterialConfigs(stubMaterial.config()));

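    // Marking the pipeline as lockable in config does not acquire the runtime lock by itself.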
    assertThat(pipelineLockService.isLocked("mingle"), is(false));

    MaterialRevisions revisions = new MaterialRevisions();
    revisions.addRevision(
        stubMaterial,
        ((TestingMaterial) stubMaterial)
            .modificationsSince(null, null, subprocessExecutionContext));
    BuildCause buildCause = BuildCause.createWithModifications(revisions, "");
    dbHelper.saveMaterials(buildCause.getMaterialRevisions());
    Pipeline pipeline =
        instanceFactory.createPipelineInstance(
            mingleConfig,
            buildCause,
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY),
            md5,
            new TimeProvider());
    pipelineService.save(pipeline);

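    // The lock is acquired once the newly scheduled instance is saved.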
    assertThat(pipelineLockService.isLocked("mingle"), is(true));
  }

  // Triggers an auto-scheduled run of the precondition pipeline and returns the latest instance.
  private Pipeline tryToScheduleAPipeline() {
    BuildCause buildCause =
        BuildCause.createWithModifications(modifyOneFile(preCondition.pipelineConfig()), "");
    dbHelper.saveMaterials(buildCause.getMaterialRevisions());
    pipelineScheduleQueue.schedule(preCondition.pipelineName, buildCause);
    scheduleService.autoSchedulePipelinesFromRequestBuffer();
    return pipelineService.mostRecentFullPipelineByName(preCondition.pipelineName);
  }

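  // Prepares test repositories, registers the pipelines under test, and seeds a passed mingle run.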
  @Before
  public void setup() throws Exception {
    diskSpaceSimulator = new DiskSpaceSimulator();
    new HgTestRepo("testHgRepo");

    svnRepository = new SvnTestRepo("testSvnRepo");

    dbHelper.onSetUp();
    configHelper.onSetUp();
    configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();

    repository = new SvnCommand(null, svnRepository.projectRepositoryUrl());

    goParentPipelineConfig =
        configHelper.addPipeline(
            GO_PIPELINE_UPSTREAM,
            STAGE_NAME,
            new MaterialConfigs(new GitMaterialConfig("foo-bar")),
            "unit");

    goPipelineConfig = configHelper.addPipeline(GO_PIPELINE_NAME, STAGE_NAME, repository, "unit");

    svnMaterialRevs = new MaterialRevisions();
    SvnMaterial svnMaterial = SvnMaterial.createSvnMaterialWithMock(repository);
    svnMaterialRevs.addRevision(
        svnMaterial,
        svnMaterial.latestModification(
            null, new ServerSubprocessExecutionContext(goConfigService, new SystemEnvironment())));

    final MaterialRevisions materialRevisions = new MaterialRevisions();
    SvnMaterial anotherSvnMaterial = SvnMaterial.createSvnMaterialWithMock(repository);
    materialRevisions.addRevision(
        anotherSvnMaterial,
        anotherSvnMaterial.latestModification(null, subprocessExecutionContext));

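    // Persist the svn revisions in a transaction; they back the build cause created below.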
    transactionTemplate.execute(
        new TransactionCallbackWithoutResult() {
          @Override
          protected void doInTransactionWithoutResult(TransactionStatus status) {
            materialRepository.save(svnMaterialRevs);
          }
        });

    BuildCause buildCause = BuildCause.createWithModifications(svnMaterialRevs, "");

    mingleConfig =
        configHelper.addPipeline(
            MINGLE_PIPELINE_NAME,
            STAGE_NAME,
            repository,
            new Filter(new IgnoredFiles("**/*.doc")),
            "unit",
            "functional");
    latestPipeline = PipelineMother.schedule(this.mingleConfig, buildCause);
    latestPipeline = pipelineDao.saveWithStages(latestPipeline);
    dbHelper.passStage(latestPipeline.getStages().first());
    pipelineScheduleQueue.clear();
  }

  // Creates a pipeline instance from a single-file modification without persisting it.
  private Pipeline scheduleWithFileChanges(PipelineConfig pipelineConfig) {
    BuildCause buildCause = BuildCause.createWithModifications(modifyOneFile(pipelineConfig), "");
    saveRevs(buildCause.getMaterialRevisions());
    return instanceFactory.createPipelineInstance(
        pipelineConfig,
        buildCause,
        new DefaultSchedulingContext(GoConstants.DEFAULT_APPROVED_BY),
        md5,
        new TimeProvider());
  }

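  // Schedules mingleConfig from the latest hg modification, persists it, and passes the first stage.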
  private void prepareAPipelineWithHistory() throws SQLException {
    MaterialRevisions materialRevisions = new MaterialRevisions();
    List<Modification> modifications =
        this.hgMaterial.latestModification(workingFolder, subprocessExecutionContext);
    materialRevisions.addRevision(this.hgMaterial, modifications);
    BuildCause buildCause = BuildCause.createWithModifications(materialRevisions, "");

    latestPipeline = PipelineMother.schedule(mingleConfig, buildCause);
    latestPipeline = dbHelper.savePipelineWithStagesAndMaterials(latestPipeline);
    dbHelper.passStage(latestPipeline.getStages().first());
  }

  @Test
  public void shouldScheduleJobForAllAgentsWhenToBeRunOnAllAgents() throws Exception {
    configHelper.addAgent("localhost", "uuid1");
    configHelper.addAgent("localhost", "uuid2");
    configHelper.addAgent("localhost", "uuid3");
    configHelper.addAgentToEnvironment("dev", "uuid1");
    configHelper.setRunOnAllAgents(
        CaseInsensitiveString.str(evolveConfig.name()), STAGE_NAME, "unit", true);

    Material stubMaterial = new TestingMaterial();
    evolveConfig.setMaterialConfigs(new MaterialConfigs(stubMaterial.config()));
    MaterialRevisions revisions = new MaterialRevisions();
    revisions.addRevision(
        stubMaterial,
        ((TestingMaterial) stubMaterial)
            .modificationsSince(null, null, subprocessExecutionContext));
    BuildCause buildCause = BuildCause.createWithModifications(revisions, "");
    dbHelper.saveMaterials(buildCause.getMaterialRevisions());

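    // uuid1 belongs to the "dev" environment, so only uuid2 and uuid3 are eligible for this pipeline.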
    Pipeline pipeline =
        instanceFactory.createPipelineInstance(
            evolveConfig,
            buildCause,
            new DefaultSchedulingContext(
                DEFAULT_APPROVED_BY,
                environmentConfigService.agentsForPipeline(evolveConfig.name())),
            md5,
            new TimeProvider());
    pipelineService.save(pipeline);

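    // With run-on-all-agents enabled, scheduling the stage creates one "unit" job per eligible agent.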
    Stage instance =
        scheduleService.scheduleStage(
            pipeline,
            STAGE_NAME,
            "anyone",
            new ScheduleService.NewStageInstanceCreator(goConfigService),
            new ScheduleService.ExceptioningErrorHandler());
    JobInstances scheduledJobs = instance.getJobInstances();
    assertThat(
        scheduledJobs.toArray(),
        hasItemInArray(
            hasProperty(
                "name", is(RunOnAllAgents.CounterBasedJobNameGenerator.appendMarker("unit", 1)))));
    assertThat(scheduledJobs.toArray(), hasItemInArray(hasProperty("agentUuid", is("uuid2"))));
    assertThat(
        scheduledJobs.toArray(),
        hasItemInArray(
            hasProperty(
                "name", is(RunOnAllAgents.CounterBasedJobNameGenerator.appendMarker("unit", 2)))));
    assertThat(scheduledJobs.toArray(), hasItemInArray(hasProperty("agentUuid", is("uuid3"))));
    assertThat(scheduledJobs.size(), is(2));
  }

  // Builds an in-memory pipeline and stage, then assigns identifiers to the stage and its jobs.
  @Before
  public void setUp() {
    MaterialRevisions materialRevisions = multipleModifications();
    stage =
        StageMother.withOneScheduledBuild("stage", "job-that-will-fail", "job-that-will-pass", 1);
    modifications = BuildCause.createWithModifications(materialRevisions, "");
    pipeline = new Pipeline("pipeline", PipelineLabel.COUNT_TEMPLATE, modifications, stage);
    stage.setIdentifier(new StageIdentifier(pipeline, stage));
    for (JobInstance job : stage.getJobInstances()) {
      job.setIdentifier(new JobIdentifier(pipeline, stage, job));
    }
    pipeline.setId(PIPELINE_ID);
    pipeline.updateCounter(9);
  }

  // Schedules the mingle pipeline with the given revisions, saves it, and passes its first stage.
  private void runAndPass(MaterialRevisions mingleRev) {
    BuildCause buildCause = BuildCause.createWithModifications(mingleRev, "boozer");
    latestPipeline = PipelineMother.schedule(mingleConfig, buildCause);
    latestPipeline = pipelineDao.saveWithStages(latestPipeline);
    dbHelper.passStage(latestPipeline.getStages().first());
  }