public TestWorkflowDataSourceRepository() throws SQLException, FileNotFoundException {
  // set the log levels
  System.setProperty(
      "java.util.logging.config.file",
      new File("./src/main/resources/logging.properties").getAbsolutePath());

  // first load the example configuration
  try {
    System.getProperties().load(new FileInputStream("./src/main/resources/workflow.properties"));
  } catch (Exception e) {
    fail(e.getMessage());
  }

  // get a temp directory
  File tempDir = null;
  File tempFile;
  try {
    tempFile = File.createTempFile("foo", "bar");
    tempFile.deleteOnExit();
    tempDir = tempFile.getParentFile();
  } catch (Exception e) {
    fail(e.getMessage());
  }

  tmpDirPath = tempDir.getAbsolutePath();
}
/** For issue #86: https://github.com/elasticsearch/elasticsearch-cloud-aws/issues/86 */
@Test
public void testGetDeleteNonExistingSnapshot_86() {
  ClusterAdminClient client = client().admin().cluster();
  logger.info("--> creating s3 repository without any path");
  PutRepositoryResponse putRepositoryResponse =
      client
          .preparePutRepository("test-repo")
          .setType("s3")
          .setSettings(Settings.settingsBuilder().put("base_path", basePath))
          .get();
  assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));

  try {
    client.prepareGetSnapshots("test-repo").addSnapshots("no-existing-snapshot").get();
    fail("Shouldn't be here");
  } catch (SnapshotMissingException ex) {
    // Expected
  }

  try {
    client.prepareDeleteSnapshot("test-repo", "no-existing-snapshot").get();
    fail("Shouldn't be here");
  } catch (SnapshotMissingException ex) {
    // Expected
  }
}
@Test
public void testRefCount() throws IOException {
  final ShardId shardId = new ShardId(new Index("index"), 1);
  DirectoryService directoryService = new LuceneManagedDirectoryService(random());
  Store store =
      new Store(
          shardId,
          ImmutableSettings.EMPTY,
          directoryService,
          randomDistributor(directoryService),
          new DummyShardLock(shardId));
  int incs = randomIntBetween(1, 100);
  for (int i = 0; i < incs; i++) {
    if (randomBoolean()) {
      store.incRef();
    } else {
      assertTrue(store.tryIncRef());
    }
    store.ensureOpen();
  }
  for (int i = 0; i < incs; i++) {
    store.decRef();
    store.ensureOpen();
  }

  // hold one extra reference so the store survives close()
  store.incRef();
  store.close();
  for (int i = 0; i < incs; i++) {
    if (randomBoolean()) {
      store.incRef();
    } else {
      assertTrue(store.tryIncRef());
    }
    store.ensureOpen();
  }
  for (int i = 0; i < incs; i++) {
    store.decRef();
    store.ensureOpen();
  }

  // releasing the last reference actually closes the store
  store.decRef();
  assertThat(store.refCount(), Matchers.equalTo(0));
  assertFalse(store.tryIncRef());
  try {
    store.incRef();
    fail("expected exception");
  } catch (AlreadyClosedException ex) {
    // expected
  }
  try {
    store.ensureOpen();
    fail("expected exception");
  } catch (AlreadyClosedException ex) {
    // expected
  }
}
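/*
 * The incRef/tryIncRef/decRef contract exercised above follows the usual
 * compare-and-set reference-counting pattern. The sketch below is a generic
 * illustration of that pattern, not the Store implementation:
 */
final class SimpleRefCounted {
  private final java.util.concurrent.atomic.AtomicInteger refCount =
      new java.util.concurrent.atomic.AtomicInteger(1); // starts open, holding one reference

  void incRef() {
    if (!tryIncRef()) {
      throw new IllegalStateException("already closed");
    }
  }

  boolean tryIncRef() {
    int count;
    do {
      count = refCount.get();
      if (count == 0) {
        return false; // closed; a dead object must never be resurrected
      }
    } while (!refCount.compareAndSet(count, count + 1));
    return true;
  }

  void decRef() {
    if (refCount.decrementAndGet() == 0) {
      // last reference released: free underlying resources here
    }
  }
}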
private static void startXmlRpcWorkflowManager() {
  System.setProperty(
      "java.util.logging.config.file",
      new File("./src/main/resources/logging.properties").getAbsolutePath());

  try {
    System.getProperties().load(new FileInputStream("./src/main/resources/workflow.properties"));
  } catch (Exception e) {
    fail(e.getMessage());
  }

  try {
    luceneCatLoc = Files.createTempDirectory("repo").toString();
    LOG.log(Level.INFO, "Lucene instance repository: [" + luceneCatLoc + "]");
  } catch (Exception e) {
    fail(e.getMessage());
  }

  if (new File(luceneCatLoc).exists()) {
    // blow away lucene cat
    LOG.log(Level.INFO, "Removing workflow instance repository: [" + luceneCatLoc + "]");
    try {
      FileUtils.deleteDirectory(new File(luceneCatLoc));
    } catch (IOException e) {
      fail(e.getMessage());
    }
  }

  System.setProperty(
      "workflow.engine.instanceRep.factory",
      "org.apache.oodt.cas.workflow.instrepo.LuceneWorkflowInstanceRepositoryFactory");
  System.setProperty("org.apache.oodt.cas.workflow.instanceRep.lucene.idxPath", luceneCatLoc);

  try {
    System.setProperty(
        "org.apache.oodt.cas.workflow.repo.dirs",
        "file://" + new File("./src/main/resources/examples").getCanonicalPath());
    System.setProperty(
        "org.apache.oodt.cas.workflow.lifecycle.filePath",
        new File("./src/main/resources/examples/workflow-lifecycle.xml").getCanonicalPath());
  } catch (Exception e) {
    fail(e.getMessage());
  }

  try {
    wmgr = new XmlRpcWorkflowManager(WM_PORT);
    Thread.sleep(MILLIS);
  } catch (Exception e) {
    LOG.log(Level.SEVERE, e.getMessage());
    fail(e.getMessage());
  }
}
@Test
public void should_fail_if_io_error_occurs_when_reading_cql_script() {
  CqlExecCassandraMojo cqlExecCassandraMojo = builder.cqlScript(file("emptyfile.cql")).build();
  mockToThrows(new IOException());
  try {
    cqlExecCassandraMojo.execute();
    fail();
  } catch (MojoExecutionException e) {
    assertEquals("Could not parse or load cql file", e.getMessage());
  } catch (MojoFailureException e) {
    fail(e.getMessage());
  }
}
private static void stopXmlRpcWorkflowManager() {
  System.setProperty(
      "java.util.logging.config.file",
      new File("./src/main/resources/logging.properties").getAbsolutePath());
  try {
    System.getProperties().load(new FileInputStream("./src/main/resources/workflow.properties"));
  } catch (Exception e) {
    fail(e.getMessage());
  }

  System.setProperty(
      "workflow.engine.instanceRep.factory",
      "org.apache.oodt.cas.workflow.instrepo.LuceneWorkflowInstanceRepositoryFactory");
  System.setProperty("org.apache.oodt.cas.workflow.instanceRep.lucene.idxPath", luceneCatLoc);

  try {
    System.setProperty(
        "org.apache.oodt.cas.workflow.repo.dirs",
        "file://" + new File("./src/main/resources/examples").getCanonicalPath());
    System.setProperty(
        "org.apache.oodt.cas.workflow.lifecycle.filePath",
        new File("./src/main/resources/examples/workflow-lifecycle.xml").getCanonicalPath());
  } catch (Exception e) {
    fail(e.getMessage());
  }

  try {
    wmgr.shutdown();
  } catch (Exception e) {
    LOG.log(Level.SEVERE, e.getMessage());
    fail(e.getMessage());
  }

  // Sleep before removing to prevent file-not-found issues.
  try {
    Thread.sleep(MILLIS);
  } catch (InterruptedException e) {
    e.printStackTrace();
  }

  if (new File(luceneCatLoc).exists()) {
    // blow away lucene cat
    LOG.log(Level.INFO, "Removing workflow instance repository: [" + luceneCatLoc + "]");
    try {
      FileUtils.deleteDirectory(new File(luceneCatLoc));
    } catch (IOException e) {
      fail(e.getMessage());
    }
  }
}
@Test
public void testIllegalArgument() {
  try {
    new SpanNotQueryBuilder(null, SpanTermQueryBuilder.PROTOTYPE);
    fail("cannot be null");
  } catch (IllegalArgumentException e) {
    // expected
  }
  try {
    new SpanNotQueryBuilder(SpanTermQueryBuilder.PROTOTYPE, null);
    fail("cannot be null");
  } catch (IllegalArgumentException e) {
    // expected
  }
}
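/*
 * The try/fail/catch idiom above recurs throughout these tests. A minimal
 * reusable helper is sketched below; `expectException` is our own hypothetical
 * utility (JUnit 4.13+ ships Assert.assertThrows for the same purpose):
 */
private static <T extends Throwable> T expectException(Class<T> expected, Runnable action) {
  try {
    action.run();
  } catch (Throwable t) {
    if (expected.isInstance(t)) {
      return expected.cast(t); // hand the exception back for further assertions
    }
    throw new AssertionError("unexpected exception type: " + t, t);
  }
  throw new AssertionError("expected " + expected.getSimpleName() + " but nothing was thrown");
}

// Usage (illustrative):
// expectException(IllegalArgumentException.class,
//     () -> new SpanNotQueryBuilder(null, SpanTermQueryBuilder.PROTOTYPE));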
@Test
public void reportsTyposInTaskName() {
  final Task task1 = task("someTask");
  final Task task2 = task("someTasks");
  final Task task3 = task("sometask");
  final Task task4 = task("other");
  context.checking(
      new Expectations() {
        {
          one(project).getTasksByName("ssomeTask", true);
          will(returnValue(toSet()));
          one(taskContainer).getAll();
          will(returnValue(toSet(task1, task2)));
          one(subProjectTaskContainer).getAll();
          will(returnValue(toSet(task3, task4)));
        }
      });

  TaskNameResolvingBuildExecuter executer =
      new TaskNameResolvingBuildExecuter(toList("ssomeTask"));
  try {
    executer.select(gradle);
    fail();
  } catch (TaskSelectionException e) {
    assertThat(
        e.getMessage(),
        equalTo(
            "Task 'ssomeTask' not found in [project]. Some candidates are: 'someTask', 'someTasks', 'sometask'."));
  }
}
@Test
public void failsWhenProvidedNameIsAmbiguous() {
  final Task task1 = task("someTask");
  final Task task2 = task("someTasks");
  context.checking(
      new Expectations() {
        {
          one(project).getTasksByName("soTa", true);
          will(returnValue(toSet()));
          one(taskContainer).getAll();
          will(returnValue(toSet(task1)));
          one(subProjectTaskContainer).getAll();
          will(returnValue(toSet(task2)));
        }
      });

  TaskNameResolvingBuildExecuter executer = new TaskNameResolvingBuildExecuter(toList("soTa"));
  try {
    executer.select(gradle);
    fail();
  } catch (TaskSelectionException e) {
    assertThat(
        e.getMessage(),
        equalTo("Task 'soTa' is ambiguous in [project]. Candidates are: 'someTask', 'someTasks'."));
  }
}
@Test
public void overloadedNonStandardWriteMethodsOnly_orderB()
    throws IntrospectionException, SecurityException, NoSuchMethodException {
  @SuppressWarnings("unused")
  class C {
    public Object setFoo(int p) {
      return new Object();
    }

    public Object setFoo(String p) {
      return new Object();
    }
  }

  BeanInfo bi = Introspector.getBeanInfo(C.class);

  assertThat(hasReadMethodForProperty(bi, "foo"), is(false));
  assertThat(hasWriteMethodForProperty(bi, "foo"), is(false));

  BeanInfo ebi = new ExtendedBeanInfo(bi);

  assertThat(hasReadMethodForProperty(bi, "foo"), is(false));
  assertThat(hasWriteMethodForProperty(bi, "foo"), is(false));

  assertThat(hasReadMethodForProperty(ebi, "foo"), is(false));
  assertThat(hasWriteMethodForProperty(ebi, "foo"), is(true));

  for (PropertyDescriptor pd : ebi.getPropertyDescriptors()) {
    if (pd.getName().equals("foo")) {
      assertThat(pd.getWriteMethod(), is(C.class.getMethod("setFoo", String.class)));
      return;
    }
  }
  fail("never matched write method");
}
@Test
public void testAliasInvalidFilterValidJson() throws Exception {
  // invalid filter but valid json: put index template works fine, fails during index creation
  client()
      .admin()
      .indices()
      .preparePutTemplate("template_1")
      .setTemplate("te*")
      .addAlias(new Alias("invalid_alias").filter("{ \"invalid\": {} }"))
      .get();

  GetIndexTemplatesResponse response =
      client().admin().indices().prepareGetTemplates("template_1").get();
  assertThat(response.getIndexTemplates().size(), equalTo(1));
  assertThat(response.getIndexTemplates().get(0).getAliases().size(), equalTo(1));
  assertThat(
      response.getIndexTemplates().get(0).getAliases().get("invalid_alias").filter().string(),
      equalTo("{\"invalid\":{}}"));

  try {
    createIndex("test");
    fail("index creation should have failed due to invalid alias filter in matching index template");
  } catch (ElasticsearchIllegalArgumentException e) {
    assertThat(e.getMessage(), equalTo("failed to parse filter for alias [invalid_alias]"));
    assertThat(e.getCause(), instanceOf(QueryParsingException.class));
    assertThat(e.getCause().getMessage(), equalTo("[test] No filter registered for [invalid]"));
  }
}
/**
 * Executes a batch whose finalizer fails.
 *
 * @throws Exception if failed
 */
@Test
public void executeBatch_failed_finalize() throws Exception {
  ProfileBuilder prf = new ProfileBuilder(folder.getRoot());
  prf.setTracker(FinalizerFailed.class);
  ExecutionTask task = prf.task();
  try {
    task.executeBatch("batch");
    fail();
  } catch (IOException e) {
    // ok.
  }

  List<Record> results = SerialExecutionTracker.get(prf.trackingId);
  verifyPhaseOrder(results);

  assertThat(phase(results, "testing", ExecutionPhase.SETUP).size(), is(2));
  assertThat(phase(results, "testing", ExecutionPhase.INITIALIZE).size(), is(1));
  assertThat(phase(results, "testing", ExecutionPhase.IMPORT).size(), is(2));
  assertThat(phase(results, "testing", ExecutionPhase.PROLOGUE).size(), is(1));
  assertThat(phase(results, "testing", ExecutionPhase.MAIN).size(), is(4));
  assertThat(phase(results, "testing", ExecutionPhase.EPILOGUE).size(), is(1));
  assertThat(phase(results, "testing", ExecutionPhase.EXPORT).size(), is(2));
  assertThat(phase(results, "testing", ExecutionPhase.FINALIZE).size(), is(1));
  assertThat(phase(results, "testing", ExecutionPhase.CLEANUP).size(), is(0));

  assertThat(flow(results, "left").size(), is(0));
  assertThat(flow(results, "right").size(), is(0));
  assertThat(flow(results, "last").size(), is(0));
}
/** Test case for issue #86: https://github.com/elasticsearch/elasticsearch-cloud-aws/issues/86 */
@Test
public void testNonExistingRepo_86() {
  Client client = client();
  logger.info(
      "--> creating s3 repository with bucket[{}] and path [{}]",
      internalCluster().getInstance(Settings.class).get("repositories.s3.bucket"),
      basePath);
  PutRepositoryResponse putRepositoryResponse =
      client
          .admin()
          .cluster()
          .preparePutRepository("test-repo")
          .setType("s3")
          .setSettings(Settings.settingsBuilder().put("base_path", basePath))
          .get();
  assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));

  logger.info("--> restore non existing snapshot");
  try {
    client
        .admin()
        .cluster()
        .prepareRestoreSnapshot("test-repo", "no-existing-snapshot")
        .setWaitForCompletion(true)
        .execute()
        .actionGet();
    fail("Shouldn't be here");
  } catch (SnapshotMissingException ex) {
    // Expected
  }
}
/**
 * This test verifies that the test configuration is set up in a manner that does not make the
 * test {@link #testRepositoryInRemoteRegion()} pointless.
 */
@Test(expected = RepositoryVerificationException.class)
public void assertRepositoryInRemoteRegionIsRemote() {
  Client client = client();
  Settings bucketSettings =
      internalCluster().getInstance(Settings.class).getByPrefix("repositories.s3.remote-bucket.");
  logger.info(
      "--> creating s3 repository with bucket[{}] and path [{}]",
      bucketSettings.get("bucket"),
      basePath);
  client
      .admin()
      .cluster()
      .preparePutRepository("test-repo")
      .setType("s3")
      .setSettings(
          Settings.settingsBuilder()
              .put("base_path", basePath)
              .put("bucket", bucketSettings.get("bucket"))
          // Region setting intentionally omitted to assert the bucket is not available
          // in the default region.
          // .put("region", bucketSettings.get("region"))
          )
      .get();
  fail("repository verification should have raised an exception!");
}
void checkBulkAction(boolean indexShouldBeAutoCreated, BulkRequestBuilder builder) {
  // Bulk operations do not throw MasterNotDiscoveredException. The only way to tell that
  // auto-create kicked in and failed is via the timeout, as bulk operations do not wait
  // on blocks.
  TimeValue timeout;
  if (indexShouldBeAutoCreated) {
    // we expect the bulk to fail because it will try to go to the master. Use a small
    // timeout and detect that it has passed
    timeout = new TimeValue(200);
  } else {
    // the request should fail very quickly - use a large timeout and make sure it didn't pass...
    timeout = new TimeValue(5000);
  }
  builder.setTimeout(timeout);
  long now = System.currentTimeMillis();
  try {
    builder.get();
    fail("Expected ClusterBlockException");
  } catch (ClusterBlockException e) {
    if (indexShouldBeAutoCreated) {
      // timeout is 200
      assertThat(System.currentTimeMillis() - now, greaterThan(timeout.millis() - 50));
      assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
    } else {
      // timeout is 5000
      assertThat(System.currentTimeMillis() - now, lessThan(timeout.millis() - 50));
    }
  }
}
@Test
public void testBrokenMapping() throws Exception {
  // clean all templates set up by the framework.
  client().admin().indices().prepareDeleteTemplate("*").get();

  // check get all templates on an empty index.
  GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get();
  assertThat(response.getIndexTemplates(), empty());

  client()
      .admin()
      .indices()
      .preparePutTemplate("template_1")
      .setTemplate("te*")
      .addMapping("type1", "abcde")
      .get();

  response = client().admin().indices().prepareGetTemplates().get();
  assertThat(response.getIndexTemplates(), hasSize(1));
  assertThat(response.getIndexTemplates().get(0).getMappings().size(), equalTo(1));
  assertThat(
      response.getIndexTemplates().get(0).getMappings().get("type1").string(), equalTo("abcde"));

  try {
    createIndex("test");
    fail("create index should have failed due to broken index templates mapping");
  } catch (ElasticsearchParseException e) {
    // everything fine
  }
}
@Test
public void failsWhenUnknownTaskNameIsProvided() {
  final Task task1 = task("t1");
  final Task task2 = task("t2");
  context.checking(
      new Expectations() {
        {
          one(project).getTasksByName("b3", true);
          will(returnValue(toSet()));
          one(taskContainer).getAll();
          will(returnValue(toSet(task1, task2)));
          one(subProjectTaskContainer).getAll();
          will(returnValue(toSet()));
        }
      });

  BuildExecuter executer = new TaskNameResolvingBuildExecuter(toList("b3"));
  try {
    executer.select(gradle);
    fail();
  } catch (TaskSelectionException e) {
    assertThat(e.getMessage(), equalTo("Task 'b3' not found in [project]."));
  }
}
@Test @SuppressWarnings("unchecked") public void testDoWithRetry_ReadTimeOut() throws Exception { AmazonElasticMapReduceCustomClient client = new AmazonElasticMapReduceCustomClient("dummy", "dummy"); client.setRequestInterval(100); Callable callable = mock(Callable.class); AmazonClientException exception = new AmazonClientException("Unable to execute HTTP request: Read timed out"); when(callable.call()).thenThrow(exception, exception, exception).thenReturn(new Object()); long startTime = System.currentTimeMillis(); Object result = client.doThrottleSafe(callable); assertNotNull(result); assertThat( (System.currentTimeMillis() - startTime), greaterThanOrEqualTo(3 * client.getRequestInterval())); // now exceed retries client.setMaxRetriesOnConnectionErrors(2); when(callable.call()).thenThrow(exception, exception, exception).thenReturn(new Object()); try { client.doThrottleSafe(callable); fail("should throw exception"); } catch (Exception e) { assertSame(exception, e); } }
/**
 * Lock provider for batch.
 *
 * @throws Exception if failed
 */
@Test
public void batch() throws Exception {
  Map<String, String> conf = new HashMap<>();
  conf.put(BasicLockProvider.KEY_DIRECTORY, folder.getRoot().getAbsolutePath());
  conf.put(ExecutionLockProvider.KEY_SCOPE, ExecutionLock.Scope.BATCH.getSymbol());

  ServiceProfile<ExecutionLockProvider> profile =
      new ServiceProfile<>(
          "testing",
          BasicLockProvider.class,
          conf,
          ProfileContext.system(getClass().getClassLoader()));
  ExecutionLockProvider instance1 = profile.newInstance();
  ExecutionLockProvider instance2 = profile.newInstance();

  try (ExecutionLock lock = instance1.newInstance("batch1")) {
    lock.beginFlow("flow1", "exec1");
    try {
      instance2.newInstance("batch1");
      fail("cannot run same batch");
    } catch (IOException e) {
      // ok.
    }
    try (ExecutionLock other = instance2.newInstance("batch2")) {
      // can acquire any flow/exec lock
      other.beginFlow("flow2", "exec1");
      other.endFlow("flow2", "exec1");
      other.beginFlow("flow1", "exec2");
      other.endFlow("flow1", "exec2");
      other.beginFlow("flow2", "exec2");
      other.endFlow("flow2", "exec2");
    }
  }
}
/**
 * Tries to create a payment against a review invoice. Here, instead of using the billing
 * process to generate a review invoice, we create the review invoice with the help of the
 * saveLegacyInvoice call.
 */
@Test
public void testPayReviewInvoice() {
  // creating new user
  UserWS user = buildUser(PRANCING_PONY_ACCOUNT_TYPE);
  user.setId(api.createUser(user));

  ItemTypeWS itemType = buildItemType();
  itemType.setId(api.createItemCategory(itemType));

  ItemDTOEx item = buildItem(itemType.getId(), api.getCallerCompanyId());
  item.setId(api.createItem(item));

  InvoiceWS invoice = buildInvoice(user.getId(), item.getId());
  invoice.setIsReview(Integer.valueOf(1));
  invoice.setId(api.saveLegacyInvoice(invoice));

  // check if invoice is a review invoice
  System.out.println("Invoice is review : " + invoice.getIsReview());
  assertEquals("Invoice is a review invoice", Integer.valueOf(1), invoice.getIsReview());

  try {
    // pay for a review invoice
    api.payInvoice(invoice.getId());
    fail("We should not be able to issue a payment against a review invoice");
  } catch (SessionInternalError e) {
    System.out.println(e.getMessage());
  }

  // clean up
  api.deleteInvoice(invoice.getId());
  api.deleteItem(item.getId());
  api.deleteItemCategory(itemType.getId());
  api.deleteUser(user.getId());
}
private void runTestWithForbiddenName(String name) throws IOException {
  try {
    pluginManager(null).removePlugin(name);
    fail("this plugin name [" + name + "] should not be allowed");
  } catch (ElasticsearchIllegalArgumentException e) {
    // We expect that error
  }
}
private void mockToThrows(Throwable throwable) {
  try {
    mockStatic(IOUtil.class);
    when(IOUtil.toString(any(FileReader.class))).thenThrow(throwable);
  } catch (IOException e) {
    fail(e.getMessage());
  }
}
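// Note: PowerMock's mockStatic(...) only takes effect when the enclosing test class
// runs under the PowerMock runner and prepares the class whose statics are mocked,
// e.g. @RunWith(PowerMockRunner.class) and @PrepareForTest(IOUtil.class); those
// annotations are assumed to be present on this test class.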
@Test
public void getByPathFailsForUnknownTask() {
  try {
    container.getByPath("unknown");
    fail();
  } catch (UnknownTaskException e) {
    assertThat(e.getMessage(), equalTo("Task with path 'unknown' not found in <project>."));
  }
}
@Test
public void should_fail_if_file_not_found_occurs_when_reading_cql_script() {
  CqlExecCassandraMojo cqlExecCassandraMojo = builder.cqlScript(file("emptyfile.cql")).build();
  mockToThrows(new FileNotFoundException());
  try {
    cqlExecCassandraMojo.execute();
    fail();
  } catch (MojoExecutionException e) {
    assertThat(
        e.getMessage(),
        allOf(
            startsWith("Cql file '"),
            endsWith("emptyfile.cql' was deleted before I could read it")));
  } catch (MojoFailureException e) {
    fail(e.getMessage());
  }
}
@Test
public void should_fail_when_request_fails() {
  CqlExecCassandraMojo cqlExecCassandraMojo = builder.cqlStatement(CQL_STATEMENT).build();
  mockThriftExecutionWith(
      new ThrowsException(
          new ThriftApiExecutionException(new InvalidRequestException("bad statement"))));
  try {
    cqlExecCassandraMojo.execute();
    fail();
  } catch (MojoExecutionException e) {
    assertEquals(
        "There was a problem calling Apache Cassandra's Thrift API. Details: The request was not properly formatted bad statement",
        e.getMessage());
  } catch (MojoFailureException e) {
    fail(e.getMessage());
  }
}
@Test
public void testCreateInstanceWithoutHttpHandler() throws Exception {
  try {
    new RestfitClient.Builder().userAgent("RestfitTest/1.0").build();
    fail("never reached");
  } catch (IllegalArgumentException e) {
    assertThat(e.getMessage(), is("No httpStack specified"));
  }
}
@Test
public void testCreateInstanceWithoutUserAgent() throws Exception {
  try {
    new RestfitClient.Builder().httpStack(new DummyHttpStack()).build();
    fail("never reached");
  } catch (IllegalArgumentException e) {
    assertThat(e.getMessage(), is("No userAgent specified"));
  }
}
private static void assertCanCreateWritableFile(File destination) throws IOException {
  boolean createdFreshFile = destination.createNewFile();
  if (createdFreshFile) {
    try {
      if (destination.canWrite()) {
        try (FileWriter writer = new FileWriter(destination)) {
          writer.append("dummy test output");
        }
      } else {
        fail(String.format("Cannot write to %s", destination));
      }
    } finally {
      destination.delete();
    }
  } else {
    fail(String.format("Cannot create file: %s", destination));
  }
}
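/*
 * Illustrative caller for the helper above (a sketch; the test name and file
 * name are hypothetical):
 */
@Test
public void outputDirectoryIsWritable() throws IOException {
  assertCanCreateWritableFile(new File(System.getProperty("java.io.tmpdir"), "dummy-output.txt"));
}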
@Test
public void parseShouldRejectChecksumFailure() {
  try {
    IBAN.parse(INVALID_IBAN);
    fail("Invalid input should have been rejected for checksum mismatch.");
  } catch (WrongChecksumException e) {
    assertThat(e.getFailedInput(), is(INVALID_IBAN));
  }
}
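/*
 * For reference, the checksum the parser rejects here is the ISO 13616 mod-97
 * rule, sketched below under the assumption that the input is already
 * normalised to upper-case A-Z and digits (an illustration, not the library's
 * implementation):
 */
static boolean hasValidChecksum(String iban) {
  // move the first four characters (country code + check digits) to the end
  String rearranged = iban.substring(4) + iban.substring(0, 4);
  int remainder = 0;
  for (char c : rearranged.toCharArray()) {
    int value = Character.isDigit(c) ? c - '0' : c - 'A' + 10; // A=10 .. Z=35
    // letters contribute two decimal digits, plain digits contribute one
    remainder = (remainder * (value > 9 ? 100 : 10) + value) % 97;
  }
  return remainder == 1;
}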
/**
 * Attempts to convert a profile without a retry count.
 *
 * @throws Exception if failed
 */
@Test
public void convert_count_unknown() throws Exception {
  ProcessProfile profile = profile(KEY_COMPONENT, DummyProcess.class.getName());
  try {
    RetryableProcessProfile.convert(profile);
    fail();
  } catch (IllegalArgumentException e) {
    // ok.
  }
}