Code example #1
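Integration test: a job scheduled for a pipeline is cancelled once the pipeline is renamed to a different case and its first stage is renamed, so the originally scheduled stage no longer exists in the config.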
  @Test
  public void
      shouldCancelAScheduledJobInCaseThePipelineIsRemovedFromTheConfig_SpecificallyAPipelineRenameToADifferentCaseAndStageNameToADifferentName()
          throws Exception {
    Material hgMaterial = new HgMaterial("url", "folder");
    String[] hgRevs = new String[] {"h1"};
    u.checkinInOrder(hgMaterial, hgRevs);

    ScheduleTestUtil.AddedPipeline p1 =
        u.saveConfigWith("PIPELINE_WHICH_WILL_EVENTUALLY_CHANGE_CASE", u.m(hgMaterial));

    u.scheduleWith(p1, hgRevs);
    ScheduleTestUtil.AddedPipeline renamedPipeline =
        u.renamePipelineAndFirstStage(
            p1,
            "pipeline_which_will_eventually_change_case",
            "NEW_RANDOM_STAGE_NAME" + UUID.randomUUID());

    Pipeline p1_2 = u.scheduleWith(renamedPipeline, hgRevs);
    CruiseConfig cruiseConfig = configHelper.load();
    buildAssignmentService.onTimer(); // To Reload Job Plans
    buildAssignmentService.onConfigChange(cruiseConfig);

    Stages allStages = stageDao.findAllStagesFor(p1_2.getName(), p1_2.getCounter());
    assertThat(
        allStages.byName(CaseInsensitiveString.str(p1.config.first().name())).getState(),
        is(StageState.Cancelled));
  }
Code example #2
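Verifies that manually scheduling a pipeline that is still building does not create a new instance; the returned pipeline has the same id as the active one.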
 @Test
 public void shouldNotScheduleActivePipeline() throws Exception {
   Pipeline pipeline = PipelineMother.building(mingleConfig);
   pipeline = dbHelper.savePipelineWithStagesAndMaterials(pipeline);
   Pipeline newPipeline = manualSchedule(CaseInsensitiveString.str(mingleConfig.name()));
   assertThat(newPipeline.getId(), is(pipeline.getId()));
 }
Code example #3
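Verifies that removing a job from the pipeline config cancels its scheduled build while the remaining job in the same stage stays scheduled.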
  @Test
  public void shouldCancelBuildsForDeletedJobsWhenPipelineConfigChanges() throws Exception {
    fixture = new PipelineWithTwoStages(materialRepository, transactionTemplate).usingTwoJobs();
    fixture.usingConfigHelper(configHelper).usingDbHelper(dbHelper).onSetUp();
    fixture.createPipelineWithFirstStageScheduled();

    buildAssignmentService.onTimer();
    configHelper.removeJob(fixture.pipelineName, fixture.devStage, fixture.JOB_FOR_DEV_STAGE);

    buildAssignmentService.onPipelineConfigChange(
        goConfigService
            .getCurrentConfig()
            .getPipelineConfigByName(new CaseInsensitiveString(fixture.pipelineName)),
        "g1");

    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
    JobInstance deletedJob =
        pipeline.getFirstStage().getJobInstances().getByName(fixture.JOB_FOR_DEV_STAGE);
    assertThat(deletedJob.getState(), is(JobState.Completed));
    assertThat(deletedJob.getResult(), is(JobResult.Cancelled));
    JobInstance retainedJob =
        pipeline.getFirstStage().getJobInstances().getByName(fixture.DEV_STAGE_SECOND_JOB);
    assertThat(retainedJob.getState(), is(JobState.Scheduled));
    assertThat(retainedJob.getResult(), is(JobResult.Unknown));
  }
Code example #4
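Forces a new pipeline instance with fresh material modifications and then checks, via verifyMingleScheduledWithModifications (see example #11), that it was scheduled with those modifications.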
  @Test
  public void shouldForceStagePlanWithModificationsSinceLast() throws Exception {
    Pipeline completedMingle = scheduleAndCompleteInitialPipelines();
    pipelineDao.loadPipeline(completedMingle.getId());
    TestingMaterial testingMaterial = new TestingMaterial();
    mingleConfig.setMaterialConfigs(new MaterialConfigs(testingMaterial.config()));

    MaterialRevisions revisions = new MaterialRevisions();
    revisions.addRevision(
        testingMaterial,
        testingMaterial.modificationsSince(null, null, subprocessExecutionContext));
    BuildCause buildCause = BuildCause.createManualForced(revisions, Username.ANONYMOUS);
    dbHelper.saveMaterials(buildCause.getMaterialRevisions());
    Pipeline forcedPipeline =
        instanceFactory.createPipelineInstance(
            mingleConfig,
            buildCause,
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY),
            md5,
            new TimeProvider());

    pipelineService.save(forcedPipeline);

    verifyMingleScheduledWithModifications();
  }
Code example #5
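Asserts that the stage JSON presenter exposes the pipeline name, stage name, builds, current label, counters and id, and omits last_successful_label when no successful stage is supplied.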
  @Test
  public void shouldGetAPresenterWithLabelAndRelevantBuildPlansAndPipelineNameAndId()
      throws Exception {
    StageJsonPresentationModel presenter =
        new StageJsonPresentationModel(pipeline, stage, null, new Agents());
    Map json = presenter.toJson();

    new JsonTester(json)
        .shouldContain(
            "{ 'pipelineName' : 'pipeline',"
                + "  'stageName' : 'stage',"
                + "  'builds' : ["
                + "    { 'name' : 'job-that-will-fail' },"
                + "    { 'name' : 'job-that-will-pass' },"
                + "    { 'name' : 'scheduledBuild' }"
                + "  ],"
                + " 'current_label' : '"
                + pipeline.getLabel()
                + "', "
                + " 'pipelineCounter' : '"
                + pipeline.getCounter()
                + "', "
                + " 'pipelineCounterOrLabel' : '"
                + pipeline.getIdentifier().instanceIdentifier()
                + "', "
                + " 'id' : '1' "
                + "}");
    assertFalse(
        "JSON shouldn't contain last_successful_label",
        json.toString().contains("last_successful_label"));
  }
Code example #6
File: Launcher.java Project: alainloisel/bluima
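Entry point that validates and parses a pipeline script, runs the resulting pipeline, and prints OK_MESSAGE on success; parse errors are rethrown with a hint about the script format.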
  /**
   * Parses the pipeline script and runs it.
   *
   * @param scriptFile the pipeline script to parse
   * @param cliArgs command-line arguments passed to the script parser
   * @throws IOException if the script file does not exist
   * @throws ParseException if the script cannot be parsed
   * @throws UIMAException if running the pipeline fails
   */
  public static void runPipeline(File scriptFile, List<String> cliArgs)
      throws IOException, UIMAException, ParseException {
    if (!scriptFile.exists()) {
      throw new IOException("Script file does not exist (" + scriptFile.getAbsolutePath() + ")");
    }

    LOG.info(
        "Parsing pipeline script at '{}'",
        scriptFile.getAbsolutePath() + " \n with CLI parameters: " + join(cliArgs, ", "));
    Pipeline pipeline = null;
    try {
      pipeline = PipelineScriptParser.parse(scriptFile, cliArgs);
    } catch (ParseException e) {
      throw new ParseException(
          "\nERROR parsing '"
              + scriptFile.getName()
              + "'\n"
              + e.getMessage()
              + "\n(see the README.txt for the pipeline script format)",
          e.getErrorOffset());
    }

    LOG.info("Successfully parsed pipeline script, now starting pipeline...");
    LOG.info("*************************************************************");
    pipeline.run();
    // will be printed if no exception.
    // used in pipeline tests, do not change
    System.out.println(OK_MESSAGE);
  }
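For reference, a minimal caller might look like the sketch below. Launcher is the class named in the file header; the script path and the CLI arguments are placeholders whose format depends on the pipeline script, not shown here.

import java.io.File;
import java.util.Arrays;

public class RunPipelineExample {
  // Hypothetical entry point: forwards its arguments to Launcher.runPipeline.
  public static void main(String[] args) throws Exception {
    File script = new File("pipelines/example.pipeline"); // placeholder script path
    Launcher.runPipeline(script, Arrays.asList(args));
  }
}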
Code example #7
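Test helper that builds a dependency-material revision from one or more upstream pipeline instances and appends it to the given list of material revisions.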
 public List<MaterialRevision> addDependencyRevisionModification(
     List<MaterialRevision> materialRevisions,
     DependencyMaterial dependencyMaterial,
     Pipeline... upstreams) {
   String stageName = CaseInsensitiveString.str(dependencyMaterial.getStageName());
   String label = upstreams[0].getLabel();
   List<Modification> modifications = new ArrayList<Modification>();
   for (Pipeline upstream : upstreams) {
     modifications.add(
         new Modification(
             new Date(),
             DependencyMaterialRevision.create(
                     CaseInsensitiveString.str(dependencyMaterial.getPipelineName()),
                     upstream.getCounter(),
                     label,
                     stageName,
                     upstream.findStage(stageName).getCounter())
                 .getRevision(),
             label,
             upstream.getId()));
   }
   MaterialRevision depRev =
       addRevisionsWithModifications(
           dependencyMaterial, modifications.toArray(new Modification[0]));
   materialRevisions.add(depRev);
   return Arrays.asList(depRev);
 }
Code example #8
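Verifies that a job requiring a resource is assigned to an agent declaring that resource, and that the persisted job plan carries the resource requirement.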
  @Test
  public void shouldScheduleIfAgentMatchingResources() throws Exception {
    JobConfig plan =
        evolveConfig
            .findBy(new CaseInsensitiveString(STAGE_NAME))
            .jobConfigByInstanceName("unit", true);
    plan.addResource("some-resource");

    scheduleHelper.schedule(evolveConfig, modifySomeFiles(evolveConfig), DEFAULT_APPROVED_BY);

    AgentConfig agentConfig = AgentMother.localAgent();
    agentConfig.addResource(new Resource("some-resource"));

    buildAssignmentService.onTimer();
    Work work = buildAssignmentService.assignWorkToAgent(agent(agentConfig));
    assertThat(work, is(not((Work) BuildAssignmentService.NO_WORK)));

    Pipeline pipeline =
        pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(evolveConfig.name()));
    JobInstance job = pipeline.findStage(STAGE_NAME).findJob("unit");

    JobPlan loadedPlan = jobInstanceDao.loadPlan(job.getId());
    assertThat(loadedPlan.getResources(), is((List<Resource>) plan.resources()));

    assertThat(job.getState(), is(JobState.Assigned));
    assertThat(job.getAgentUuid(), is(agentConfig.getUuid()));
  }
Code example #9
File: MRPipelineIT.java Project: rdblue/crunch
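Checks that the output of a single PGroupedTable can be ungrouped and written to two separate text outputs within one Crunch pipeline run.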
  @Test
  public void testPGroupedTableToMultipleOutputs() throws IOException {
    Pipeline pipeline = new MRPipeline(MRPipelineIT.class, tmpDir.getDefaultConfiguration());
    PGroupedTable<String, String> groupedLineTable =
        pipeline
            .readTextFile(tmpDir.copyResourceFileName("set1.txt"))
            .by(IdentityFn.<String>getInstance(), Writables.strings())
            .groupByKey();

    PTable<String, String> ungroupedTableA = groupedLineTable.ungroup();
    PTable<String, String> ungroupedTableB = groupedLineTable.ungroup();

    File outputDirA = tmpDir.getFile("output_a");
    File outputDirB = tmpDir.getFile("output_b");

    pipeline.writeTextFile(ungroupedTableA, outputDirA.getAbsolutePath());
    pipeline.writeTextFile(ungroupedTableB, outputDirB.getAbsolutePath());
    PipelineResult result = pipeline.done();
    for (StageResult stageResult : result.getStageResults()) {
      assertTrue(stageResult.getStageName().length() > 1);
      assertTrue(stageResult.getStageId().length() > 1);
    }

    // Verify that output from a single PGroupedTable can be sent to multiple collections
    assertTrue(new File(outputDirA, "part-r-00000").exists());
    assertTrue(new File(outputDirB, "part-r-00000").exists());
  }
Code example #10
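Verifies that a manual trigger can force the first stage to run again while a later stage of the same pipeline is still running.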
 @Test
 public void shouldForceFirstStagePlanWhenOtherStageIsRunning() throws Exception {
   pipelineWithTwoStages = new PipelineWithTwoStages(materialRepository, transactionTemplate);
   pipelineWithTwoStages.usingDbHelper(dbHelper).usingConfigHelper(configHelper).onSetUp();
   pipelineWithTwoStages.createPipelineWithFirstStagePassedAndSecondStageRunning();
   Pipeline pipeline = manualSchedule(pipelineWithTwoStages.pipelineName);
   assertThat(pipeline.getFirstStage().isActive(), is(true));
 }
Code example #11
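Assertion helper (matching the call in example #4): loads the most recent pipeline instance and checks that its build cause carries three modifications and that the first job is Scheduled.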
 private void verifyMingleScheduledWithModifications() {
   Pipeline scheduledPipeline =
       pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(mingleConfig.name()));
   BuildCause buildCause = scheduledPipeline.getBuildCause();
   assertThat(buildCause.getMaterialRevisions().totalNumberOfModifications(), is(3));
   JobInstance instance = scheduledPipeline.getFirstStage().getJobInstances().first();
   assertThat(instance.getState(), is(JobState.Scheduled));
 }
Code example #12
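Test helper that saves a pipeline with a building stage and assigns full job identifiers to every job instance in that stage.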
 public Stage saveBuildingStage(String pipelineName, String stageName) throws SQLException {
   Pipeline pipeline = saveTestPipeline(pipelineName, stageName);
   Stage stage = saveBuildingStage(pipeline.getStages().byName(stageName));
   for (JobInstance job : stage.getJobInstances()) {
     job.setIdentifier(new JobIdentifier(pipeline, stage, job));
   }
   return stage;
 }
Code example #13
File: PlumberTest.java Project: liubo404/enhydrator
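Round-trips a pipeline configuration through the Plumber: the deserialized copy must be a distinct object that is equal to the original.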
 @Test
 public void writeAndReadConfiguration() {
   Plumber plumber = Plumber.createWithDefaultPath();
   Pipeline origin = PipelineTest.getJDBCPipeline();
   plumber.intoConfiguration(origin);
   Pipeline deserialized = plumber.fromConfiguration(origin.getName());
   assertNotSame(deserialized, origin);
   assertThat(deserialized, is(origin));
 }
Code example #14
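Parses a pipeline template from JSON, validating the top-level members, the serialized requests, and the template itself before returning the parsed Pipeline.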
 /**
  * Converts a JSON string into an object containing pipeline template information.
  *
  * @param pipelineTemplate A JSON string representing the pipeline template.
  * @return The pipeline template info object.
  * @throws JsonSyntaxException if the JSON is malformed or the resulting template is invalid.
  */
 @SuppressWarnings("unused")
 public static Pipeline templateFromJson(final String pipelineTemplate) {
   checkOnPipelineMembers(pipelineTemplate);
   Pipeline pipeline = gson.fromJson(pipelineTemplate, Pipeline.class);
   checkOnRequestsMembers(toJson(pipeline.getSerializedRequests()));
   String invalid = pipeline.isValid();
   if (!invalid.isEmpty()) {
     throw new JsonSyntaxException(invalid);
   }
   return gson.fromJson(pipelineTemplate, Pipeline.class);
 }
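A hedged usage sketch: the containing class name (PipelineTemplates here) is an assumption, since the snippet does not show it; only templateFromJson and its JsonSyntaxException contract come from the source, and Pipeline is the project's own type.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import com.google.gson.JsonSyntaxException;

public class TemplateLoader {
  // Reads a template file and delegates to the (assumed) PipelineTemplates.templateFromJson.
  public static Pipeline load(Path jsonFile) throws IOException {
    String json = Files.readString(jsonFile); // requires Java 11+
    try {
      return PipelineTemplates.templateFromJson(json);
    } catch (JsonSyntaxException e) {
      throw new IllegalArgumentException("Invalid pipeline template: " + jsonFile, e);
    }
  }
}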
Code example #15
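Assertion helper verifying that the mingle pipeline's first stage was scheduled with two job instances (the first of them Scheduled) and that the evolve and go pipelines were scheduled as well.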
  private void assertPipelinesScheduled() {
    Pipeline minglePipeline =
        pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(mingleConfig.name()));
    Stage mingleStage = minglePipeline.getFirstStage();
    assertThat(mingleStage.getName(), is(STAGE_NAME));
    assertThat(mingleStage.getJobInstances().size(), is(2));
    JobInstance mingleJob = mingleStage.getJobInstances().first();
    assertThat(mingleJob.getState(), is(JobState.Scheduled));

    assertPipelineScheduled(evolveConfig);
    assertPipelineScheduled(goConfig);
  }
Code example #16
File: PipelineTest.java Project: jiaqi/cyclopsgroup
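Runs a Pipeline containing two MockValves and asserts that both executed, in order.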
  public void testRun() throws Exception {
    List<String> values = new ArrayList<String>();
    Pipeline p = new Pipeline();
    p.addValve(new MockValve(values, "aaa"));
    p.addValve(new MockValve(values, "bbb"));

    p.run(null);

    assertEquals(2, values.size());
    assertEquals("aaa", values.get(0));
    assertEquals("bbb", values.get(1));
  }
Code example #17
File: PlumberTest.java Project: liubo404/enhydrator
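Writes a pipeline configuration to the default path, then reads it back from a FileReader via fromInputStream and checks that the copy equals the original.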
  @Test
  public void readConfigurationFromInputStream() throws FileNotFoundException {
    Plumber output = Plumber.createWithDefaultPath();
    Pipeline origin = PipelineTest.getJDBCPipeline();
    output.intoConfiguration(origin);

    Plumber input = Plumber.createWithoutPath();
    Reader reader = new FileReader("./config/" + origin.getName() + ".xml");
    Pipeline deserialized = input.fromInputStream(reader);
    assertNotSame(deserialized, origin);
    assertThat(deserialized, is(origin));
  }
Code example #18
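Deprecated test helper that passes a pipeline's stages (per its own note, effectively only the first) and returns the pipeline reloaded from the database with the updated stages.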
 @Deprecated // Only actually passes the first stage. Use newPipelineWithAllStagesPassed instead
 public Pipeline passPipeline(Pipeline pipeline) {
   for (Stage stage : pipeline.getStages()) {
     passStage(stage);
   }
   Stages loadedStages = new Stages();
   for (Stage stage : pipeline.getStages()) {
     loadedStages.add(stageDao.stageById(stage.getId()));
   }
   Pipeline loadedPipeline = this.pipelineDao.loadPipeline(pipeline.getId());
   loadedPipeline.setStages(loadedStages);
   return loadedPipeline;
 }
Code example #19
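Test helper that schedules every job instance of a pipeline, persists the pipeline with its stages and materials, and asserts that it was inserted.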
  private Pipeline scheduleJobInstancesAndSavePipeline(Pipeline pipeline) {
    assertNotInserted(pipeline.getId());
    for (Stage stage : pipeline.getStages()) {
      for (JobInstance jobInstance : stage.getJobInstances()) {
        jobInstance.schedule();
      }
    }
    this.savePipelineWithStagesAndMaterials(pipeline);

    long pipelineId = pipeline.getId();
    assertIsInserted(pipelineId);
    return pipeline;
  }
Code example #20
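A word-count test on the Dataflow SDK's TestPipeline: applies CountWords to an in-memory input and asserts the formatted output with DataflowAssert.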
  /** Example test that tests a PTransform by using an in-memory input and inspecting the output. */
  @Test
  @Category(RunnableOnService.class)
  public void testCountWords() throws Exception {
    Pipeline p = TestPipeline.create();

    PCollection<String> input = p.apply(Create.of(WORDS).withCoder(StringUtf8Coder.of()));

    PCollection<String> output =
        input.apply(new CountWords()).apply(ParDo.of(new FormatAsTextFn()));

    DataflowAssert.that(output).containsInAnyOrder(COUNTS_ARRAY);
    p.run();
  }
Code example #21
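Verifies that builds belonging to a stage deleted from the config are cancelled when the config change is processed.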
  @Test
  public void shouldCancelOutOfDateBuilds() throws Exception {
    fixture.createPipelineWithFirstStageScheduled();
    buildAssignmentService.onTimer();
    configHelper.removeStage(fixture.pipelineName, fixture.devStage);

    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());

    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
    JobInstance job = pipeline.getFirstStage().getJobInstances().first();
    assertThat(job.getState(), is(JobState.Completed));
    assertThat(job.getResult(), is(JobResult.Cancelled));
  }
Code example #22
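Asserts that the stage JSON presenter reports last_successful_label and the matching last_successful_stage_locator when a successful stage identifier is supplied.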
  @Test
  public void shouldReturnLastSuccesfulLabel() throws Exception {
    StageIdentifier successfulStage =
        new StageIdentifier(pipeline.getName(), 1, "LABEL:1", stage.getName(), "1");
    StageJsonPresentationModel presenter =
        new StageJsonPresentationModel(pipeline, stage, successfulStage, new Agents());
    Map json = presenter.toJson();

    new JsonTester(json)
        .shouldContain(
            "{ 'last_successful_label' : 'LABEL:1', 'last_successful_stage_locator' : '"
                + String.format("%s/%s/%s/%s", pipeline.getName(), "1", stage.getName(), "1")
                + "' }");
  }
Code example #23
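Fixture setup for the presenter tests: builds a stage with scheduled jobs and a pipeline whose build cause carries several modifications, then wires up stage and job identifiers and sets the pipeline id and counter.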
 @Before
 public void setUp() {
   MaterialRevisions materialRevisions = multipleModifications();
   stage =
       StageMother.withOneScheduledBuild("stage", "job-that-will-fail", "job-that-will-pass", 1);
   modifications = BuildCause.createWithModifications(materialRevisions, "");
   pipeline = new Pipeline("pipeline", PipelineLabel.COUNT_TEMPLATE, modifications, stage);
   stage.setIdentifier(new StageIdentifier(pipeline, stage));
   for (JobInstance job : stage.getJobInstances()) {
     job.setIdentifier(new JobIdentifier(pipeline, stage, job));
   }
   pipeline.setId(PIPELINE_ID);
   pipeline.updateCounter(9);
 }
Code example #24
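Decoder worker loop: keeps running the decoder and feeding it packets received from the pipeline until decoding completes, then dumps the result and reports how many packets were received.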
 @Override
 public void run() {
   while (!decoder.complete()) {
     decoder.run();
     decoder.pushPacket(new Packet(ppl.receive()));
   }
   ppl.complete();
   try {
     decoder.dump();
   } catch (Exception e) {
     e.printStackTrace();
   }
   System.out.println("Decoder complete : " + decoder.received + " packets received.");
 }
Code example #25
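Verifies that a scheduled build whose pipeline has been removed from the config is cancelled during work assignment: the job completes as Cancelled and its stage is marked Cancelled.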
  @Test
  public void shouldCancelBuildBelongingToNonExistentPipelineWhenCreatingWork() throws Exception {
    fixture.createPipelineWithFirstStageScheduled();
    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);

    ScheduledPipelineLoader scheduledPipelineLoader = mock(ScheduledPipelineLoader.class);
    when(scheduledPipelineLoader.pipelineWithPasswordAwareBuildCauseByBuildId(
            pipeline.getFirstStage().getJobInstances().first().getId()))
        .thenThrow(new PipelineNotFoundException("thrown by mockPipelineService"));

    GoConfigService mockGoConfigService = mock(GoConfigService.class);
    CruiseConfig config = configHelper.currentConfig();
    configHelper.removePipeline(fixture.pipelineName, config);
    when(mockGoConfigService.getCurrentConfig()).thenReturn(config);

    buildAssignmentService =
        new BuildAssignmentService(
            mockGoConfigService,
            jobInstanceService,
            scheduleService,
            agentService,
            environmentConfigService,
            timeProvider,
            transactionTemplate,
            scheduledPipelineLoader,
            pipelineService,
            builderFactory,
            agentRemoteHandler);
    buildAssignmentService.onTimer();

    AgentConfig agentConfig = AgentMother.localAgent();
    agentConfig.addResource(new Resource("some-other-resource"));

    try {
      buildAssignmentService.assignWorkToAgent(agent(agentConfig));
      fail("should have thrown PipelineNotFoundException");
    } catch (PipelineNotFoundException e) {
      // ok
    }

    pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);

    JobInstance job = pipeline.getFirstStage().getJobInstances().first();
    assertThat(job.getState(), is(JobState.Completed));
    assertThat(job.getResult(), is(JobResult.Cancelled));
    Stage stage = stageDao.findStageWithIdentifier(job.getIdentifier().getStageIdentifier());
    assertThat(stage.getState(), is(StageState.Cancelled));
    assertThat(stage.getResult(), is(StageResult.Cancelled));
  }
Code example #26
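Asserts that the stage JSON presenter includes last-build durations for jobs that have them and omits last_successful_label when no successful stage is supplied.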
  @Test
  public void shouldGetAPresenterWithLabelAndRelevantBuildPlans() throws Exception {
    DurationBeans durations =
        new DurationBeans(
            new DurationBean(stage.getJobInstances().getByName("job-that-will-fail").getId(), 12L));

    StageJsonPresentationModel presenter =
        new StageJsonPresentationModel(
            pipeline, stage, null, new Agents(), durations, new TrackingTool());
    Map json = presenter.toJson();

    new JsonTester(json)
        .shouldContain(
            "{ 'stageName' : 'stage',"
                + "  'builds' : ["
                + "    { 'name' : 'job-that-will-fail', 'last_build_duration' : '12' },"
                + "    { 'name' : 'job-that-will-pass' }, "
                + "    { 'name' : 'scheduledBuild' }"
                + "  ],"
                + " 'current_label' : '"
                + pipeline.getLabel()
                + "',"
                + " 'id' : '1' "
                + "}");
    assertFalse(
        "JSON shouldn't contain last_successful_label",
        json.toString().contains("last_successful_label"));
  }
Code example #27
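Wrapper that forwards a 3D texture image update to the underlying rendering pipeline, passing the texture parameters along with the auto-mipmap-generation flag for the canvas.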
  // Wrapper around the native call for 3D textures
  void updateTextureImage(
      Canvas3D cv,
      int face,
      int numLevels,
      int level,
      int textureFormat,
      int imageFormat,
      int width,
      int height,
      int depth,
      int boundaryWidth,
      int imageDataType,
      Object imageData) {

    Pipeline.getPipeline()
        .updateTexture3DImage(
            cv.ctx,
            numLevels,
            level,
            textureFormat,
            imageFormat,
            width,
            height,
            depth,
            boundaryWidth,
            imageDataType,
            imageData,
            useAutoMipMapGeneration(cv));
  }
Code example #28
File: ConfigurationIT.java Project: rdblue/crunch
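Crunch helper that reads a text file through a parallelDo configured with an expected value via ParallelDoOptions and fails the test if any materialized output differs from it.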
 private static void run(Pipeline p, String input, String expected) throws Exception {
   Iterable<String> mat =
       p.read(From.textFile(input))
           .parallelDo(
               "conf",
               CONFIG_FN,
               Writables.strings(),
               ParallelDoOptions.builder().conf(KEY, expected).build())
           .materialize();
   for (String v : mat) {
     if (!expected.equals(v)) {
       Assert.fail("Unexpected value: " + v);
     }
   }
   p.done();
 }
Code example #29
File: MRPipelineIT.java Project: rdblue/crunch
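Checks that a filtered PCollection can be materialized and then written to a text file across successive pipeline.run() calls.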
 @Test
 public void materializedColShouldBeWritten() throws Exception {
   File textFile = tmpDir.copyResourceFile("shakes.txt");
   Pipeline pipeline = new MRPipeline(MRPipelineIT.class, tmpDir.getDefaultConfiguration());
   PCollection<String> genericCollection = pipeline.readTextFile(textFile.getAbsolutePath());
   pipeline.run();
   PCollection<String> filter =
       genericCollection.filter("Filtering data", FilterFns.<String>ACCEPT_ALL());
   filter.materialize();
   pipeline.run();
   File file = tmpDir.getFile("output.txt");
   Target outFile = To.textFile(file.getAbsolutePath());
   PCollection<String> write = filter.write(outFile);
   write.materialize();
   pipeline.run();
 }
Code example #30
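Test helper that creates a pipeline instance from a manually forced build cause with one modified file, saves it with stages and materials, and fails its first stage.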
 public Pipeline newPipelineWithFirstStageFailed(PipelineConfig config) throws SQLException {
   Pipeline pipeline =
       instanceFactory.createPipelineInstance(
           config,
           BuildCause.createManualForced(
               modifyOneFile(
                   MaterialsMother.createMaterialsFromMaterialConfigs(config.materialConfigs()),
                   ModificationsMother.currentRevision()),
               Username.ANONYMOUS),
           new DefaultSchedulingContext(GoConstants.DEFAULT_APPROVED_BY),
           md5,
           new TimeProvider());
   savePipelineWithStagesAndMaterials(pipeline);
   failStage(pipeline.getFirstStage());
   return pipeline;
 }