@Test
public void shouldReturnTheCorrectLocalizedMessageForDuplicatePipelinesInAnEnvironment() {
    BasicEnvironmentConfig environmentConfig = environmentConfig("uat");
    goConfigService.addPipeline(PipelineConfigMother.createPipelineConfig("foo", "dev", "job"), "foo-grp");
    environmentConfig.addPipeline(new CaseInsensitiveString("foo"));
    goConfigService.addEnvironment(environmentConfig);

    ArrayList<String> pipelines = new ArrayList<>();
    pipelines.add("foo");

    HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
    service.createEnvironment(env("foo-env", pipelines, new ArrayList<Map<String, String>>(), new ArrayList<String>()),
            new Username(new CaseInsensitiveString("any")), result);

    result = new HttpLocalizedOperationResult();
    service.createEnvironment(env("env", pipelines, new ArrayList<Map<String, String>>(), new ArrayList<String>()),
            new Username(new CaseInsensitiveString("any")), result);

    assertThat(result.message(localizer),
            is("Failed to add environment. Associating pipeline(s) which is already part of uat environment"));
}
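
// The environment tests in this excerpt call two helpers that are not shown here:
// environmentConfig(name) and env(name, pipelines, environmentVariables, agents).
// The sketch below is an assumption inferred from the call sites (including the
// "name"/"value" keys for the variable maps), not the actual helper implementations.
private BasicEnvironmentConfig environmentConfig(String name) {
    return new BasicEnvironmentConfig(new CaseInsensitiveString(name));
}

private BasicEnvironmentConfig env(String name, List<String> selectedPipelines,
                                   List<Map<String, String>> environmentVariables, List<String> selectedAgents) {
    BasicEnvironmentConfig config = environmentConfig(name);
    for (String pipeline : selectedPipelines) {
        config.addPipeline(new CaseInsensitiveString(pipeline));
    }
    for (String agent : selectedAgents) {
        config.addAgent(agent);
    }
    for (Map<String, String> variable : environmentVariables) {
        // assumed map shape: {"name": ..., "value": ...}
        config.addEnvironmentVariable(variable.get("name"), variable.get("value"));
    }
    return config;
}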
@Test
public void shouldCancelBuildBelongingToNonExistentPipelineWhenCreatingWork() throws Exception {
    fixture.createPipelineWithFirstStageScheduled();
    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);

    ScheduledPipelineLoader scheduledPipelineLoader = mock(ScheduledPipelineLoader.class);
    when(scheduledPipelineLoader.pipelineWithPasswordAwareBuildCauseByBuildId(pipeline.getFirstStage().getJobInstances().first().getId()))
            .thenThrow(new PipelineNotFoundException("thrown by mockPipelineService"));

    GoConfigService mockGoConfigService = mock(GoConfigService.class);
    CruiseConfig config = configHelper.currentConfig();
    configHelper.removePipeline(fixture.pipelineName, config);
    when(mockGoConfigService.getCurrentConfig()).thenReturn(config);

    buildAssignmentService = new BuildAssignmentService(mockGoConfigService, jobInstanceService, scheduleService,
            agentService, environmentConfigService, timeProvider, transactionTemplate, scheduledPipelineLoader,
            pipelineService, builderFactory, agentRemoteHandler);
    buildAssignmentService.onTimer();

    AgentConfig agentConfig = AgentMother.localAgent();
    agentConfig.addResource(new Resource("some-other-resource"));

    try {
        buildAssignmentService.assignWorkToAgent(agent(agentConfig));
        fail("should have thrown PipelineNotFoundException");
    } catch (PipelineNotFoundException e) {
        // ok
    }

    pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);

    JobInstance job = pipeline.getFirstStage().getJobInstances().first();
    assertThat(job.getState(), is(JobState.Completed));
    assertThat(job.getResult(), is(JobResult.Cancelled));

    Stage stage = stageDao.findStageWithIdentifier(job.getIdentifier().getStageIdentifier());
    assertThat(stage.getState(), is(StageState.Cancelled));
    assertThat(stage.getResult(), is(StageResult.Cancelled));
}
@Before
public void setup() throws Exception {
    configHelper = new GoConfigFileHelper();
    configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();
    configHelper.onSetUp();
    goConfigService.forceNotifyListeners();
}
@Test
public void shouldDeleteAnEnvironment() throws Exception {
    String environmentName = "dev";
    HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
    goConfigService.addEnvironment(new BasicEnvironmentConfig(new CaseInsensitiveString(environmentName)));

    assertTrue(goConfigService.hasEnvironmentNamed(new CaseInsensitiveString(environmentName)));
    service.deleteEnvironment(service.getEnvironmentConfig(environmentName),
            new Username(new CaseInsensitiveString("foo")), result);
    assertFalse(goConfigService.hasEnvironmentNamed(new CaseInsensitiveString(environmentName)));
    assertThat(result.message(localizer), containsString("Environment 'dev' was deleted successfully."));
}
@Test
public void shouldCancelBuildsForDeletedJobsWhenPipelineConfigChanges() throws Exception {
    fixture = new PipelineWithTwoStages(materialRepository, transactionTemplate).usingTwoJobs();
    fixture.usingConfigHelper(configHelper).usingDbHelper(dbHelper).onSetUp();
    fixture.createPipelineWithFirstStageScheduled();
    buildAssignmentService.onTimer();

    configHelper.removeJob(fixture.pipelineName, fixture.devStage, fixture.JOB_FOR_DEV_STAGE);

    buildAssignmentService.onPipelineConfigChange(
            goConfigService.getCurrentConfig().getPipelineConfigByName(new CaseInsensitiveString(fixture.pipelineName)), "g1");

    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);

    JobInstance deletedJob = pipeline.getFirstStage().getJobInstances().getByName(fixture.JOB_FOR_DEV_STAGE);
    assertThat(deletedJob.getState(), is(JobState.Completed));
    assertThat(deletedJob.getResult(), is(JobResult.Cancelled));

    JobInstance retainedJob = pipeline.getFirstStage().getJobInstances().getByName(fixture.DEV_STAGE_SECOND_JOB);
    assertThat(retainedJob.getState(), is(JobState.Scheduled));
    assertThat(retainedJob.getResult(), is(JobResult.Unknown));
}
@Test
public void shouldNotDeadlockWhenAllPossibleWaysOfUpdatingTheConfigAreBeingUsedAtTheSameTime() throws Exception {
    final ArrayList<Thread> configSaveThreads = new ArrayList<>();
    final int pipelineCreatedThroughApiCount = 100;
    final int pipelineCreatedThroughUICount = 100;

    for (int i = 0; i < pipelineCreatedThroughUICount; i++) {
        Thread thread = configSaveThread(i);
        configSaveThreads.add(thread);
    }
    for (int i = 0; i < pipelineCreatedThroughApiCount; i++) {
        Thread thread = pipelineSaveThread(i);
        configSaveThreads.add(thread);
    }

    for (Thread configSaveThread : configSaveThreads) {
        Thread timerThread = null;
        try {
            timerThread = createThread(new Runnable() {
                @Override
                public void run() {
                    try {
                        File configFile = new File(goConfigDao.fileLocation());
                        String currentConfig = FileUtil.readContentFromFile(configFile);
                        String updatedConfig = currentConfig.replaceFirst("artifactsdir=\".*\"",
                                "artifactsdir=\"" + UUID.randomUUID().toString() + "\"");
                        FileUtil.writeContentToFile(updatedConfig, configFile);
                    } catch (IOException e) {
                        fail("Failed with error: " + e.getMessage());
                    }
                    cachedFileGoConfig.forceReload();
                }
            }, "timer-thread");
        } catch (InterruptedException e) {
            fail(e.getMessage());
        }

        try {
            configSaveThread.start();
            timerThread.start();
            configSaveThread.join();
            timerThread.join();
        } catch (InterruptedException e) {
            fail(e.getMessage());
        }
    }

    assertThat(goConfigService.getAllPipelineConfigs().size(),
            is(pipelineCreatedThroughApiCount + pipelineCreatedThroughUICount));
}
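
// configSaveThread(i), pipelineSaveThread(i) and createThread(runnable, name) above are helpers
// defined elsewhere in the test class. A minimal sketch of createThread, assuming it only wraps
// the Runnable in a named thread (the throws clause mirrors the call site, which catches
// InterruptedException around it):
private Thread createThread(Runnable runnable, String name) throws InterruptedException {
    return new Thread(runnable, name);
}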
@After
public void teardown() throws Exception {
    goCache.clear();
    agentService.clearAll();
    fixture.onTearDown();
    dbHelper.onTearDown();
    configHelper.onTearDown();
    FileUtil.deleteFolder(goConfigService.artifactsDir());
    agentAssignment.clear();
    agentRemoteHandler.connectedAgents().clear();
}
public AdminAndRoleSelections getAdminAndRoleSelections(List<String> users) {
    final SecurityConfig securityConfig = goConfigService.security();
    Set<Role> roles = allRoles(securityConfig);
    final List<TriStateSelection> roleSelections = TriStateSelection.forRoles(roles, users);
    final TriStateSelection adminSelection = TriStateSelection.forSystemAdmin(securityConfig.adminsConfig(), roles,
            new SecurityService.UserRoleMatcherImpl(securityConfig), users);
    return new AdminAndRoleSelections(adminSelection, roleSelections);
}
public void modifyRolesAndUserAdminPrivileges(final List<String> users, final TriStateSelection adminPrivilege,
                                              final List<TriStateSelection> roleSelections, LocalizedOperationResult result) {
    Users allUsers = userDao.allUsers();
    for (String user : users) {
        if (!allUsers.containsUserNamed(user)) {
            result.badRequest(LocalizedMessage.string("USER_DOES_NOT_EXIST_IN_DB", user));
            return;
        }
    }
    try {
        final GoConfigDao.CompositeConfigCommand command = new GoConfigDao.CompositeConfigCommand();
        command.addCommand(goConfigService.modifyRolesCommand(users, roleSelections));
        command.addCommand(goConfigService.modifyAdminPrivilegesCommand(users, adminPrivilege));
        goConfigService.updateConfig(command);
    } catch (Exception e) {
        result.badRequest(LocalizedMessage.string("INVALID_ROLE_NAME", e.getMessage()));
    }
}
@Test
public void shouldCancelOutOfDateBuilds() throws Exception {
    fixture.createPipelineWithFirstStageScheduled();
    buildAssignmentService.onTimer();
    configHelper.removeStage(fixture.pipelineName, fixture.devStage);

    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());

    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
    JobInstance job = pipeline.getFirstStage().getJobInstances().first();
    assertThat(job.getState(), is(JobState.Completed));
    assertThat(job.getResult(), is(JobResult.Cancelled));
}
@Before
public void setup() throws Exception {
    configHelper = new GoConfigFileHelper();
    dbHelper.onSetUp();
    configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();
    configHelper.onSetUp();
    goConfigService.forceNotifyListeners();

    File dumpDir = tempFolder.newFolder("perf-pipelineapi-test");
    FileUtil.deleteDirectoryNoisily(dumpDir);
    dumpDir.mkdirs();

    result = new HttpLocalizedOperationResult();
    user = new Username(new CaseInsensitiveString("admin"));
    consoleAppenderForPerformanceTest = "ConsoleAppenderForPerformanceTest";
    rollingFileAppenderForPerformanceTest = "RollingFileAppenderForPerformanceTest";
}
@Test
public void shouldRemoveAllJobPlansThatAreNotInConfig() {
    CruiseConfig oldConfig = goConfigService.getCurrentConfig();
    ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("p1", "s1", u.m(new HgMaterial("hg", null)));
    Pipeline p1_1 = instanceFactory.createPipelineInstance(p1.config, modifyNoFiles(p1.config),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("p2", "s1", u.m(new HgMaterial("hg", null)));
    Pipeline p2_1 = instanceFactory.createPipelineInstance(p2.config, modifyNoFiles(p2.config),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(p1_1);
    dbHelper.savePipelineWithStagesAndMaterials(p2_1);

    CruiseConfig cruiseConfig = goConfigService.getCurrentConfig();
    buildAssignmentService.onConfigChange(cruiseConfig);
    buildAssignmentService.onTimer();

    List<JobPlan> plans = (List<JobPlan>) ReflectionUtil.getField(buildAssignmentService, "jobPlans");
    assertThat(plans.isEmpty(), is(false));
    assertThat(plans.size(), is(2));

    configHelper.writeConfigFile(oldConfig);

    plans = (List<JobPlan>) ReflectionUtil.getField(buildAssignmentService, "jobPlans");
    assertThat("Actual size is " + plans.size(), plans.isEmpty(), is(true));
}
@Test
public void shouldUpdateExistingEnvironment() throws Exception {
    BasicEnvironmentConfig uat = environmentConfig("uat");
    goConfigService.addPipeline(PipelineConfigMother.createPipelineConfig("foo", "dev", "job"), "foo-grp");
    goConfigService.addPipeline(PipelineConfigMother.createPipelineConfig("bar", "dev", "job"), "foo-grp");
    Username user = Username.ANONYMOUS;
    agentConfigService.addAgent(new AgentConfig("uuid-1", "host-1", "192.168.1.2"), user);
    agentConfigService.addAgent(new AgentConfig("uuid-2", "host-2", "192.168.1.3"), user);
    uat.addPipeline(new CaseInsensitiveString("foo"));
    uat.addAgent("uuid-2");
    uat.addEnvironmentVariable("env-one", "ONE");
    uat.addEnvironmentVariable("env-two", "TWO");
    goConfigService.addEnvironment(new BasicEnvironmentConfig(new CaseInsensitiveString("dev")));
    goConfigService.addEnvironment(new BasicEnvironmentConfig(new CaseInsensitiveString("qa")));
    goConfigService.addEnvironment(uat);
    goConfigService.addEnvironment(new BasicEnvironmentConfig(new CaseInsensitiveString("acceptance")));
    goConfigService.addEnvironment(new BasicEnvironmentConfig(new CaseInsensitiveString("function_testing")));

    EnvironmentConfig newUat = new BasicEnvironmentConfig(new CaseInsensitiveString("prod"));
    newUat.addPipeline(new CaseInsensitiveString("bar"));
    newUat.addAgent("uuid-1");
    newUat.addEnvironmentVariable("env-three", "THREE");

    HttpLocalizedOperationResult result = service.updateEnvironment("uat", newUat,
            new Username(new CaseInsensitiveString("foo")), goConfigDao.md5OfConfigFile());

    EnvironmentConfig updatedEnv = service.named("prod");
    assertThat(updatedEnv.name(), is(new CaseInsensitiveString("prod")));
    assertThat(updatedEnv.getAgents().getUuids(), is(Arrays.asList("uuid-1")));
    assertThat(updatedEnv.getPipelineNames(), is(Arrays.asList(new CaseInsensitiveString("bar"))));
    EnvironmentVariablesConfig updatedVariables = new EnvironmentVariablesConfig();
    updatedVariables.add("env-three", "THREE");
    assertThat(updatedEnv.getVariables(), is(updatedVariables));

    EnvironmentsConfig currentEnvironments = goConfigService.getCurrentConfig().getEnvironments();
    assertThat(currentEnvironments.indexOf(updatedEnv), is(2));
    assertThat(currentEnvironments.size(), is(5));
}
private List<UserModel> allUsersForDisplay() {
    Collection<User> users = allUsers();
    ArrayList<UserModel> userModels = new ArrayList<UserModel>();
    for (User user : users) {
        String userName = user.getName();

        ArrayList<String> roles = new ArrayList<String>();
        for (Role role : goConfigService.rolesForUser(new CaseInsensitiveString(userName))) {
            roles.add(CaseInsensitiveString.str(role.getName()));
        }

        userModels.add(new UserModel(user, roles,
                securityService.isUserAdmin(new Username(new CaseInsensitiveString(userName)))));
    }
    return userModels;
}
@Before
public void setUp() throws Exception {
    configHelper = new GoConfigFileHelper().usingCruiseConfigDao(goConfigDao);
    configHelper.onSetUp();
    dbHelper.onSetUp();

    fixture = new PipelineWithTwoStages(materialRepository, transactionTemplate);
    fixture.usingConfigHelper(configHelper).usingDbHelper(dbHelper).onSetUp();

    repository = new SvnCommand(null, testRepo.projectRepositoryUrl());
    evolveConfig = configHelper.addPipeline("evolve", STAGE_NAME, repository, "unit");
    configHelper.addPipeline("anotherPipeline", STAGE_NAME, repository, "anotherTest");
    configHelper.addPipeline("thirdPipeline", STAGE_NAME, repository, "yetAnotherTest");
    goConfigService.forceNotifyListeners();
    goCache.clear();

    u = new ScheduleTestUtil(transactionTemplate, materialRepository, dbHelper, configHelper);
    agent = new AgentStub();
}
@Test
public void shouldNotAssignCancelledJob() throws Exception {
    AgentIdentifier instance = agent(AgentMother.localAgent());
    Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, modifyNoFiles(evolveConfig),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
    JobInstance job = buildOf(pipeline);
    job.cancel();
    jobInstanceDao.updateStateAndResult(job);

    assertThat(buildAssignmentService.assignWorkToAgent(instance), is((Work) BuildAssignmentService.NO_WORK));
}
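
// buildOf(pipeline) is a helper that is not part of this excerpt. A plausible sketch, assuming it
// returns the first (and, in these fixtures, only) job of the pipeline's first stage:
private JobInstance buildOf(Pipeline pipeline) {
    return pipeline.getFirstStage().getJobInstances().first();
}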
@Test
public void shouldNotAssignWorkWhenPipelineScheduledWithStaleMaterials() {
    AgentIdentifier instance = agent(AgentMother.localAgent());
    Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, modifyNoFiles(evolveConfig),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline);

    evolveConfig.setMaterialConfigs(new MaterialConfigs(new HgMaterialConfig("foo", null)));
    configHelper.removePipeline(CaseInsensitiveString.str(evolveConfig.name()));
    configHelper.addPipeline(evolveConfig);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());

    JobInstance job = buildOf(pipeline);
    jobInstanceDao.updateStateAndResult(job);

    assertThat(buildAssignmentService.assignWorkToAgent(instance), is((Work) BuildAssignmentService.NO_WORK));
}
@Test
public void shouldRescheduleAbandonedBuild() throws SQLException {
    AgentIdentifier instance = agent(AgentMother.localAgent());
    Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, modifyNoFiles(evolveConfig),
            new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
    buildAssignmentService.onTimer();
    buildAssignmentService.assignWorkToAgent(instance);
    long firstAssignedBuildId = buildOf(pipeline).getId();

    // somehow agent abandoned its original build...

    buildAssignmentService.assignWorkToAgent(instance);

    JobInstance reloaded = jobInstanceDao.buildByIdWithTransitions(firstAssignedBuildId);
    assertThat(reloaded.getState(), is(JobState.Rescheduled));
    assertThat(reloaded.isIgnored(), is(true));
}
private void setupPipelines(Integer numberOfPipelinesToBeCreated) throws Exception {
    String groupName = "jumbo";
    String configFile = "<FULL PATH TO YOUR CONFIG FILE>";
    String xml = FileUtil.readContentFromFile(new File(configFile));
    xml = goConfigMigration.upgradeIfNecessary(xml);
    goConfigService.fileSaver(false).saveConfig(xml, goConfigService.getConfigForEditing().getMd5());
    LOGGER.info(String.format("Total number of pipelines in this config: %s",
            goConfigService.getConfigForEditing().allPipelines().size()));

    if (goConfigService.getConfigForEditing().hasPipelineGroup(groupName)) {
        ((BasicPipelineConfigs) goConfigService.getConfigForEditing().findGroup(groupName)).clear();
    }

    final CruiseConfig configForEditing = goConfigService.getConfigForEditing();
    for (int i = 0; i < numberOfPipelinesToBeCreated; i++) {
        JobConfig jobConfig = new JobConfig(new CaseInsensitiveString("job"));
        StageConfig stageConfig = new StageConfig(new CaseInsensitiveString("stage"), new JobConfigs(jobConfig));
        PipelineConfig pipelineConfig = new PipelineConfig(new CaseInsensitiveString("pipeline" + i),
                new MaterialConfigs(new GitMaterialConfig("FOO")), stageConfig);
        configForEditing.addPipeline(groupName, pipelineConfig);
    }

    goConfigService.updateConfig(new UpdateConfigCommand() {
        @Override
        public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
            return configForEditing;
        }
    });
}
/**
 * Dependency graph and fetch-artifact calls exercised by this test:
 *
 * <pre>
 * uppest [uppest-stage, uppest-stage-2, uppest-stage-3]
 *   uppest-stage    ---> upper-peer   (instance uppest/2/uppest-stage/1)
 *   uppest-stage-2  ---> upper        (instance uppest/1/uppest-stage-2/1)
 * upper      ---> downer
 * upper-peer ---> downer
 * downer     ---> downest
 *
 * downest .... foo.zip from uppest-stage via ancestor path uppest/upper/downer
 * downest .... bar.zip from uppest-stage via ancestor path uppest/upper-peer/downer
 *
 * ....  :: fetch artifact call
 * --->  :: material dependency
 * </pre>
 */
@Test
public void shouldCreateWork_withAncestorFetchArtifactCalls_resolvedToRelevantStage() throws Exception {
    configHelper.addPipeline("uppest", "uppest-stage");
    configHelper.addStageToPipeline("uppest", "uppest-stage-2");
    PipelineConfig uppest = configHelper.addStageToPipeline("uppest", "uppest-stage-3");

    configHelper.addPipeline("upper", "upper-stage");
    DependencyMaterial upper_sMaterial = new DependencyMaterial(new CaseInsensitiveString("uppest"), new CaseInsensitiveString("uppest-stage-2"));
    PipelineConfig upper = configHelper.setMaterialConfigForPipeline("upper", upper_sMaterial.config());

    configHelper.addPipeline("upper-peer", "upper-peer-stage");
    DependencyMaterial upperPeer_sMaterial = new DependencyMaterial(new CaseInsensitiveString("uppest"), new CaseInsensitiveString("uppest-stage"));
    PipelineConfig upperPeer = configHelper.setMaterialConfigForPipeline("upper-peer", upperPeer_sMaterial.config());

    configHelper.addPipeline("downer", "downer-stage");
    DependencyMaterial downer_sUpperMaterial = new DependencyMaterial(new CaseInsensitiveString("upper"), new CaseInsensitiveString("upper-stage"));
    configHelper.setMaterialConfigForPipeline("downer", downer_sUpperMaterial.config());
    DependencyMaterial downer_sUpperPeerMaterial = new DependencyMaterial(new CaseInsensitiveString("upper-peer"), new CaseInsensitiveString("upper-peer-stage"));
    PipelineConfig downer = configHelper.addMaterialToPipeline("downer", downer_sUpperPeerMaterial.config());

    configHelper.addPipeline("downest", "downest-stage");
    DependencyMaterial downest_sMaterial = new DependencyMaterial(new CaseInsensitiveString("downer"), new CaseInsensitiveString("downer-stage"));
    configHelper.setMaterialConfigForPipeline("downest", downest_sMaterial.config());

    Tasks allFetchTasks = new Tasks();
    allFetchTasks.add(new FetchTask(new CaseInsensitiveString("uppest/upper/downer"), new CaseInsensitiveString("uppest-stage"),
            new CaseInsensitiveString("unit"), "foo.zip", "bar"));
    allFetchTasks.add(new FetchTask(new CaseInsensitiveString("uppest/upper-peer/downer"), new CaseInsensitiveString("uppest-stage"),
            new CaseInsensitiveString("unit"), "bar.zip", "baz"));
    configHelper.replaceAllJobsInStage("downest", "downest-stage",
            new JobConfig(new CaseInsensitiveString("fetcher"), new Resources("fetcher"), new ArtifactPlans(), allFetchTasks));
    PipelineConfig downest = goConfigService.getCurrentConfig().pipelineConfigByName(new CaseInsensitiveString("downest"));

    DefaultSchedulingContext defaultSchedulingCtx = new DefaultSchedulingContext(DEFAULT_APPROVED_BY);

    Pipeline uppestInstanceForUpper = instanceFactory.createPipelineInstance(uppest, modifySomeFiles(uppest), defaultSchedulingCtx, md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(uppestInstanceForUpper);
    dbHelper.passStage(uppestInstanceForUpper.findStage("uppest-stage"));
    Stage upper_sMaterialStage = dbHelper.scheduleStage(uppestInstanceForUpper, uppest.getStage(new CaseInsensitiveString("uppest-stage-2")));
    dbHelper.passStage(upper_sMaterialStage);

    Pipeline uppestInstanceForUpperPeer = instanceFactory.createPipelineInstance(uppest, modifySomeFiles(uppest), new DefaultSchedulingContext("super-hero"), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(uppestInstanceForUpperPeer);
    Stage upperPeer_sMaterialStage = uppestInstanceForUpperPeer.findStage("uppest-stage");
    dbHelper.passStage(upperPeer_sMaterialStage);

    Pipeline upperInstance = instanceFactory.createPipelineInstance(upper, buildCauseForDependency(upper_sMaterial, upper_sMaterialStage), defaultSchedulingCtx, md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(upperInstance);
    Stage downer_sUpperMaterialStage = upperInstance.findStage("upper-stage");
    dbHelper.passStage(downer_sUpperMaterialStage);

    Pipeline upperPeerInstance = instanceFactory.createPipelineInstance(upperPeer, buildCauseForDependency(upperPeer_sMaterial, upperPeer_sMaterialStage), defaultSchedulingCtx, md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(upperPeerInstance);
    Stage downer_sUpperPeerMaterialStage = upperPeerInstance.findStage("upper-peer-stage");
    dbHelper.passStage(downer_sUpperPeerMaterialStage);

    MaterialRevisions downer_sMaterialRevisions = new MaterialRevisions(
            materialRevisionForDownstream(downer_sUpperMaterial, downer_sUpperMaterialStage),
            materialRevisionForDownstream(downer_sUpperPeerMaterial, downer_sUpperPeerMaterialStage));

    Pipeline downerInstance = instanceFactory.createPipelineInstance(downer, BuildCause.createManualForced(downer_sMaterialRevisions, loserUser), defaultSchedulingCtx, md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(downerInstance);
    Stage downest_sMaterialStage = downerInstance.findStage("downer-stage");
    dbHelper.passStage(downest_sMaterialStage);

    Pipeline downestInstance = instanceFactory.createPipelineInstance(downest, buildCauseForDependency(downest_sMaterial, downest_sMaterialStage), defaultSchedulingCtx, md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(downestInstance);

    buildAssignmentService.onTimer();

    AgentConfig agentConfig = AgentMother.localAgent();
    agentConfig.addResource(new Resource("fetcher"));
    BuildWork work = (BuildWork) buildAssignmentService.assignWorkToAgent(agent(agentConfig));

    List<Builder> builders = work.getAssignment().getBuilders();
    FetchArtifactBuilder fooZipFetch = (FetchArtifactBuilder) builders.get(0);
    assertThat(fooZipFetch.artifactLocator(), is("uppest/1/uppest-stage/latest/unit/foo.zip"));
    FetchArtifactBuilder barZipFetch = (FetchArtifactBuilder) builders.get(1);
    assertThat(barZipFetch.artifactLocator(), is("uppest/2/uppest-stage/1/unit/bar.zip"));
}
private void assertUnknownUsersAreAllowedToLogin() {
    if (goConfigService.isOnlyKnownUserAllowedToLogin()) {
        throw new OnlyKnownUsersAllowedException("Please ask the administrator to add you to Go");
    }
}
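
// A hedged sketch of how the guard above might be used when a previously unknown user logs in;
// addUserIfDoesNotExist and the save call are illustrative assumptions, not part of this excerpt.
public void addUserIfDoesNotExist(Username username) {
    // reject unknown users outright when the server only allows known users to log in
    assertUnknownUsersAreAllowedToLogin();
    String name = CaseInsensitiveString.str(username.getUsername());
    if (!userDao.allUsers().containsUserNamed(name)) {
        userDao.saveOrUpdate(new User(name)); // assumed persistence call
    }
}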