public void testIncludePatterns() throws IOException { // Put system properties String gogoKey = "gogo1"; String gogo2Key = "gogo2a"; String kokoKey = "koko"; System.setProperty(kokoKey, "parent"); System.setProperty(gogoKey, "1"); System.setProperty(gogo2Key, "2"); Properties startProps = new Properties(); startProps.put(BuildInfoConfigProperties.PROP_ENV_VARS_INCLUDE_PATTERNS, "gogo?*"); Properties buildInfoProperties = BuildInfoExtractorUtils.getEnvProperties(startProps, null); assertEquals( buildInfoProperties.getProperty("gogo1"), "1", "gogo1 parent number property does not match"); assertEquals( buildInfoProperties.getProperty("gogo2a"), "2", "gogo2a parent number property does not match"); assertNull( buildInfoProperties.getProperty("koko"), "Should not find koko property due to include patterns"); System.clearProperty(gogoKey); System.clearProperty(gogo2Key); System.clearProperty(kokoKey); }
public void getEnvAndSysPropertiesFromFile() throws IOException { // create a property file File propsFile = new File("tempPropFile"); propsFile.createNewFile(); Properties props = new Properties(); props.put(ENV_POPO_KEY, "buildname"); props.put(ENV_MOMO_KEY, "1"); props.store(new FileOutputStream(propsFile), ""); System.setProperty(BuildInfoConfigProperties.PROP_PROPS_FILE, propsFile.getAbsolutePath()); // Put system properties String kokoKey = "koko"; String gogoKey = "gogo"; System.setProperty(kokoKey, "parent"); System.setProperty(gogoKey, "2"); Properties buildInfoProperties = BuildInfoExtractorUtils.getEnvProperties(new Properties(), null); assertEquals( buildInfoProperties.getProperty(ENV_POPO_KEY), "buildname", "popo property does not match"); assertEquals( buildInfoProperties.getProperty(ENV_MOMO_KEY), "1", "momo number property does not match"); assertEquals( buildInfoProperties.getProperty("koko"), "parent", "koko parent name property does not match"); assertEquals( buildInfoProperties.getProperty("gogo"), "2", "gogo parent number property does not match"); propsFile.delete(); System.clearProperty(BuildInfoConfigProperties.PROP_PROPS_FILE); System.clearProperty(kokoKey); System.clearProperty(gogoKey); }
@After public void close() { // We need to clear some system properties System.clearProperty("python.cachedir.skip"); System.clearProperty("python.console.encoding"); se.close(); }
@Test
public void testClasspathSetupNone() throws Exception {
  // Start from a clean HADOOPBIN so HiveConf resolves its default value.
  System.clearProperty(HiveConf.ConfVars.HADOOPBIN.toString());
  String originalHadoopBin = new HiveConf().get(HiveConf.ConfVars.HADOOPBIN.toString());
  // None of these jars is an hbase-protocol jar, so nothing should be collected.
  List<String> inputURLs =
      Lists.newArrayList(
          "/usr/lib/hbase/lib/hbase-hadoop2-compat-0.96.1.2.0.11.0-1-hadoop2.jar",
          "/usr/lib/hbase/lib/hbase-thrift-0.96.1.2.0.11.0-1-hadoop2.jar",
          "/usr/lib/hadoop/hadoop-common-2.2.0.2.0.11.0-1-tests.jar");
  HiveConf hiveConf = new HiveConf();
  LocalMapreduceClasspathSetter classpathSetter =
      new LocalMapreduceClasspathSetter(
          hiveConf, TEMP_FOLDER.newFolder().getAbsolutePath(), ImmutableList.<String>of());
  for (String jar : inputURLs) {
    classpathSetter.accept(jar);
  }
  Assert.assertTrue(classpathSetter.getHbaseProtocolJarPaths().isEmpty());
  classpathSetter.setupClasspathScript();
  // With no protocol jars found, HADOOPBIN must be left untouched.
  String newHadoopBin = new HiveConf().get(HiveConf.ConfVars.HADOOPBIN.toString());
  Assert.assertEquals(originalHadoopBin, newHadoopBin);
  System.clearProperty(HiveConf.ConfVars.HADOOPBIN.toString());
}
@After
public void tearDown() throws Exception {
  // Remove the per-test Hadoop/Solr overrides, then run the base teardown.
  for (String key : new String[] {"hadoop.log.dir", "solr.hdfs.blockcache.enabled"}) {
    System.clearProperty(key);
  }
  super.tearDown();
}
@AfterClass
public static void shutDown() {
  // Drop every system property this test class configured.
  String[] keys = {
    "polling.frequency", "watermark.default.expression", "account.sync.policy", "trigger.policy"
  };
  for (String key : keys) {
    System.clearProperty(key);
  }
}
/*
 * Do it.
 * We update PROP_ERROR here.
 *
 * Runs one full reseed attempt: publishes progress via the
 * PROP_STATUS/PROP_INPROGRESS system properties, invokes reseed(false),
 * and reports the outcome both on stdout and through PROP_ERROR.
 */
public void run() {
    _isRunning = true;
    // Clear any error left over from a previous attempt.
    System.clearProperty(PROP_ERROR);
    System.setProperty(PROP_STATUS, _("Reseeding"));
    System.setProperty(PROP_INPROGRESS, "true");
    _attemptStarted = 0;
    _gotDate = 0;
    _sslState = null; // start fresh
    // Pick up the optional HTTP proxy configuration from the context.
    if (_context.getBooleanProperty(PROP_PROXY_ENABLE)) {
        _proxyHost = _context.getProperty(PROP_PROXY_HOST);
        _proxyPort = _context.getProperty(PROP_PROXY_PORT, -1);
    }
    System.out.println("Reseed start");
    int total = reseed(false);
    if (total >= 50) {
        // 50 or more routers fetched counts as full success: no error set.
        System.out.println("Reseed complete, " + total + " received");
        System.clearProperty(PROP_ERROR);
    } else if (total > 0) {
        // Partial success: record how few routers were fetched.
        System.out.println("Reseed complete, only " + total + " received");
        System.setProperty(PROP_ERROR, ngettext("Reseed fetched only 1 router.",
                                                "Reseed fetched only {0} routers.", total));
    } else {
        // Nothing fetched: point the user at the reseed configuration page.
        System.out.println("Reseed failed, check network connection");
        System.out.println(
            "Ensure that nothing blocks outbound HTTP, check the logs, " +
            "and if nothing helps, read the FAQ about reseeding manually.");
        System.setProperty(PROP_ERROR, _("Reseed failed.") + ' ' +
                                       _("See {0} for help.",
                                         "<a target=\"_top\" href=\"/configreseed\">" + _("reseed configuration page") + "</a>"));
    }
    // Publish completion and release transient state.
    System.setProperty(PROP_INPROGRESS, "false");
    System.clearProperty(PROP_STATUS);
    _sslState = null; // don't hold ref
    _isRunning = false;
}
@Test
public void testWhiteBlackListSystemProperty() throws Exception {
  // Serialize a TestClass1 instance to a temp file to deserialize below.
  // FIX: typo "serailizeFile" corrected; try-with-resources replaces the
  // manual try/finally close.
  File serializeFile = new File(temporaryFolder.getRoot(), "testclass.bin");
  try (ObjectOutputStream outputStream =
      new ObjectOutputStream(new FileOutputStream(serializeFile))) {
    outputStream.writeObject(new TestClass1());
    outputStream.flush();
  }
  // The stream under test must pick up both lists from system properties.
  System.setProperty(
      ObjectInputStreamWithClassLoader.BLACKLIST_PROPERTY, "system.defined.black.list");
  System.setProperty(
      ObjectInputStreamWithClassLoader.WHITELIST_PROPERTY, "system.defined.white.list");
  try (ObjectInputStreamWithClassLoader ois =
      // FIX: the original only closed ois on the success path, leaking the
      // FileInputStream when an assertion failed; try-with-resources always
      // closes it.
      new ObjectInputStreamWithClassLoader(new FileInputStream(serializeFile))) {
    String bList = ois.getBlackList();
    String wList = ois.getWhiteList();
    assertEquals("wrong black list: " + bList, "system.defined.black.list", bList);
    assertEquals("wrong white list: " + wList, "system.defined.white.list", wList);
  } finally {
    // Always restore the JVM-wide properties.
    System.clearProperty(ObjectInputStreamWithClassLoader.BLACKLIST_PROPERTY);
    System.clearProperty(ObjectInputStreamWithClassLoader.WHITELIST_PROPERTY);
  }
}
public static void executeTestFile(File baseDir, File releaseTestFile, TestReport testReport) throws IOException { String testDataDirectory = releaseTestFile.getParent(); info(""); info("---------------------------------------------------------------"); info("Lancement du test release : " + releaseTestFile.getAbsolutePath()); info("\tRepertoire des donnees du test : " + testDataDirectory); info("\tRepertoire de lancement : " + baseDir); // Surcharge du comportement de ANT System.setProperty("test.dir", testDataDirectory); System.setProperty("basedir", baseDir.getAbsolutePath()); // Lancement du test try { Project project = new Project(); project.addBuildListener(new TokioLoadListener(testReport)); project.addBuildListener(new TokioInsertListener(testReport)); AntRunner.start(project, new AntGenerator().generateAntFile(releaseTestFile)); } catch (IOException e) { error("[TEST EN ECHEC :: " + releaseTestFile.getName() + "]", e); throw e; } finally { System.clearProperty("test.dir"); System.clearProperty("basedir"); } }
/**
 * Stops the master and cleans up client connections.
 *
 * @throws Exception when the operation fails
 */
public void stop() throws Exception {
  clearClients();
  mAlluxioMaster.stop();
  // Remove the Alluxio overrides that were installed for this master.
  for (String key : new String[] {"alluxio.web.resources", "alluxio.master.min.worker.threads"}) {
    System.clearProperty(key);
  }
}
@TearDown
public void tearDown() {
  // Clear every logging-framework configuration override, then remove the
  // log files produced during the run.
  String[] keys = {"log4j.configurationFile", "log4j.configuration", "logback.configurationFile"};
  for (String key : keys) {
    System.clearProperty(key);
  }
  deleteLogFiles();
}
public void restoreProperties() {
  // Restore each JGroups system property to its captured snapshot; a null
  // snapshot means the property was originally unset, so it is cleared.
  if (bind_addr != null) {
    System.setProperty("jgroups.bind_addr", bind_addr);
  } else {
    System.clearProperty("jgroups.bind_addr");
  }
  if (mcast_addr != null) {
    System.setProperty("jgroups.udp.mcast_addr", mcast_addr);
  } else {
    System.clearProperty("jgroups.udp.mcast_addr");
  }
  if (mcast_port != null) {
    System.setProperty("jgroups.udp.mcast_port", mcast_port);
  } else {
    System.clearProperty("jgroups.udp.mcast_port");
  }
}
/**
 * Stops the master and cleans up client connections.
 *
 * <p>This method will not clean up {@link tachyon.util.UnderFileSystemUtils} data. To do that you
 * must call {@link #cleanupUnderfs()}.
 *
 * @throws Exception when the operation fails
 */
public void stop() throws Exception {
  clearClients();
  mTachyonMaster.stop();
  // Remove the Tachyon overrides that were installed for this master.
  for (String key : new String[] {"tachyon.web.resources", "tachyon.master.min.worker.threads"}) {
    System.clearProperty(key);
  }
}
@After
public final void after() throws Exception {
  // Close the client and cluster first, then undo the worker/user overrides
  // configured for this test.
  mBlockMasterClient.close();
  mLocalTachyonCluster.stop();
  for (String key :
      new String[] {
        Constants.WORKER_DATA_SERVER,
        Constants.WORKER_NETWORK_NETTY_FILE_TRANSFER_TYPE,
        Constants.USER_REMOTE_BLOCK_READER
      }) {
    System.clearProperty(key);
  }
}
@After public final void after() throws Exception { mLocalTachyonCluster.stop(); // TODO Remove this once we are able to push tiered storage info to LocalTachyonCluster System.clearProperty(Constants.WORKER_MAX_TIERED_STORAGE_LEVEL); System.clearProperty(String.format(Constants.WORKER_TIERED_STORAGE_LEVEL_ALIAS_FORMAT, 1)); System.clearProperty(String.format(Constants.WORKER_TIERED_STORAGE_LEVEL_DIRS_PATH_FORMAT, 1)); System.clearProperty(String.format(Constants.WORKER_TIERED_STORAGE_LEVEL_DIRS_QUOTA_FORMAT, 1)); }
@Override
public void distribTearDown() throws Exception {
  // Record a failure so the cluster layout gets printed for debugging.
  if (!success) {
    printLayoutOnTearDown = true;
  }
  // Remove the Solr/ZK overrides this test installed before delegating.
  for (String key :
      new String[] {
        "solr.directoryFactory", "solr.ulog.numRecordsToKeep", "tests.zk.violationReportAction"
      }) {
    System.clearProperty(key);
  }
  super.distribTearDown();
}
@Override public void tearDown() throws Exception { super.tearDown(); System.clearProperty("numShards"); System.clearProperty("zkHost"); System.clearProperty("solr.xml.persist"); // insurance DirectUpdateHandler2.commitOnClose = true; }
@After
public void cleanup() {
  // Close the Spring context if one was built, then reset every property the
  // tests may have set on the JVM.
  if (this.context != null) {
    this.context.close();
  }
  for (String key :
      new String[] {
        "the.property",
        "spring.config.location",
        "spring.main.banner-mode",
        CachedIntrospectionResults.IGNORE_BEANINFO_PROPERTY_NAME
      }) {
    System.clearProperty(key);
  }
}
@Test
public void testReplacementForTwoVariablesWithAppendix() {
  // Two adjacent placeholders plus a literal suffix must all be resolved.
  String template = "${testproperty1}${testproperty2}.appendix";
  System.setProperty("testproperty1", "correctreplacement1");
  System.setProperty("testproperty2", "correctreplacement2");
  String resolved = BounceProxySystemPropertyLoader.replaceVariableBySystemProperty(template);
  Assert.assertEquals("correctreplacement1correctreplacement2.appendix", resolved);
  // Clean up the JVM-wide properties.
  System.clearProperty("testproperty1");
  System.clearProperty("testproperty2");
}
@Override
public void tearDown() throws Exception {
  // Give in-flight messages a moment to drain before stopping both ends.
  Thread.sleep(2000);
  if (this.shootme != null) {
    this.shootme.stop();
  }
  if (this.shootist != null) {
    this.shootist.stop();
  }
  // Forget the TLS key/trust-store configuration set for this test.
  for (String key :
      new String[] {
        "javax.net.ssl.keyStore",
        "javax.net.ssl.trustStore",
        "javax.net.ssl.keyStorePassword",
        "javax.net.ssl.keyStoreType"
      }) {
    System.clearProperty(key);
  }
}
@After
public void tearDown() throws Exception {
  // Release the JDBC connection first, then the mini HiveServer2 instance.
  if (hs2Conn != null) {
    hs2Conn.close();
  }
  boolean serverRunning = miniHS2 != null && miniHS2.isStarted();
  if (serverRunning) {
    miniHS2.stop();
  }
  // Drop the trust-store overrides so later tests start from a clean JVM.
  for (String key : new String[] {JAVA_TRUST_STORE_PROP, JAVA_TRUST_STORE_PASS_PROP}) {
    System.clearProperty(key);
  }
}
public void testComplete() throws Exception {
  // Two system properties sharing the "foo." prefix should both be offered
  // as completions of "system propget foo".
  System.setProperty("foo.bar", "bar");
  System.setProperty("foo.bar2", "bar");
  try {
    CompletionMatch match = assertComplete("system propget foo");
    assertEquals(2, match.getValue().getSize());
    assertTrue(match.getValue().get(".bar") != null);
    assertTrue(match.getValue().get(".bar2") != null);
  } finally {
    System.clearProperty("foo.bar");
    System.clearProperty("foo.bar2");
  }
}
@AfterClass
public static void afterClass() throws Exception {
  // Reset the HDFS-related system properties before tearing anything down.
  for (String key : new String[] {"solr.ulog.dir", "test.build.data", "test.cache.data"}) {
    System.clearProperty(key);
  }
  deleteCore();
  // Release the filesystem and the mini DFS cluster, nulling the statics so
  // they cannot leak into other test classes.
  IOUtils.closeQuietly(fs);
  fs = null;
  HdfsTestUtil.teardownClass(dfsCluster);
  hdfsDataDir = null;
  dfsCluster = null;
}
@After
public void releaseCachSupport() throws Exception {
  testSupport.tearDown();
  // Restore the snapshot of each JGroups-related property: a null snapshot
  // means the property was absent before the test, so it is cleared.
  if (preferIPv4Stack != null) {
    System.setProperty(PREFER_IPV4STACK, preferIPv4Stack);
  } else {
    System.clearProperty(PREFER_IPV4STACK);
  }
  if (jgroupsCfgFile != null) {
    System.setProperty(JGROUPS_CFG_FILE, jgroupsCfgFile);
  } else {
    System.clearProperty(JGROUPS_CFG_FILE);
  }
}
@Test
public void testThatTribeClientsIgnoreGlobalSysProps() throws Exception {
  // Global es.* system properties must not leak into the tribe clients.
  System.setProperty("es.cluster.name", "tribe_node_cluster");
  System.setProperty("es.tribe.t1.cluster.name", "tribe1");
  System.setProperty("es.tribe.t2.cluster.name", "tribe2");
  try {
    assertTribeNodeSuccesfullyCreated(Settings.EMPTY);
  } finally {
    for (String key :
        new String[] {"es.cluster.name", "es.tribe.t1.cluster.name", "es.tribe.t2.cluster.name"}) {
      System.clearProperty(key);
    }
  }
}
/**
 *
 * Test SSL client with non-SSL server fails
 *
 * @throws Exception
 */
@Test
public void testInvalidConfig() throws Exception {
  // Phase 1: binary transport, SSL requested via URL params against a
  // server started without SSL — the handshake must fail with 08S01.
  clearSslConfOverlay(confOverlay);
  // Test in binary mode
  setBinaryConfOverlay(confOverlay);
  miniHS2.start(confOverlay);
  DriverManager.setLoginTimeout(4);
  try {
    hs2Conn = DriverManager.getConnection(
        miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");
    fail("SSL connection should fail with NON-SSL server");
  } catch (SQLException e) {
    // expected error
    assertEquals("08S01", e.getSQLState().trim());
  }
  // Phase 2: same server, but the trust store is supplied through JVM
  // system properties instead of URL params — must still fail.
  System.setProperty(JAVA_TRUST_STORE_PROP, dataFileDir + File.separator + TRUST_STORE_NAME);
  System.setProperty(JAVA_TRUST_STORE_PASS_PROP, KEY_STORE_PASSWORD);
  try {
    hs2Conn = DriverManager.getConnection(
        miniHS2.getJdbcURL() + ";ssl=true",
        System.getProperty("user.name"), "bar");
    fail("SSL connection should fail with NON-SSL server");
  } catch (SQLException e) {
    // expected error
    assertEquals("08S01", e.getSQLState().trim());
  }
  miniHS2.stop();
  // Phase 3: restart in HTTP mode with ssl properties specified in url;
  // the trust-store system properties are cleared first.
  // Test in http mode with ssl properties specified in url
  System.clearProperty(JAVA_TRUST_STORE_PROP);
  System.clearProperty(JAVA_TRUST_STORE_PASS_PROP);
  setHttpConfOverlay(confOverlay);
  miniHS2.start(confOverlay);
  try {
    hs2Conn = DriverManager.getConnection(
        miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");
    fail("SSL connection should fail with NON-SSL server");
  } catch (SQLException e) {
    // expected error
    assertEquals("08S01", e.getSQLState().trim());
  }
}
public void getBuildInfoPropertiesFromSystemProps() throws IOException {
  // Publish two build-info keys as JVM system properties.
  System.setProperty(POPO_KEY, "buildname");
  System.setProperty(MOMO_KEY, "1");
  // Merge the system properties in, then keep only build-info properties.
  Properties merged =
      BuildInfoExtractorUtils.mergePropertiesWithSystemAndPropertyFile(new Properties());
  Properties filtered =
      BuildInfoExtractorUtils.filterDynamicProperties(
          merged, BuildInfoExtractorUtils.BUILD_INFO_PROP_PREDICATE);
  assertEquals(filtered.size(), 2, "there should only be 2 properties after the filtering");
  assertEquals(filtered.getProperty(POPO_KEY), "buildname", "popo property does not match");
  assertEquals(filtered.getProperty(MOMO_KEY), "1", "momo property does not match");
  // Undo the JVM-wide mutations.
  System.clearProperty(POPO_KEY);
  System.clearProperty(MOMO_KEY);
}
@Test
public void testClasspathSetupMulti() throws Exception {
  // Start from a clean HADOOPBIN so the setter's generated script is picked up.
  System.clearProperty(HiveConf.ConfVars.HADOOPBIN.toString());
  // Three hbase-protocol jars mixed with unrelated jars: all three must be
  // detected, in input order.
  List<String> inputURLs = Lists.newArrayList();
  inputURLs.add("/usr/lib/hbase/lib/hbase-protocol-0.96.1.2.0.11.0-1-hadoop2.jar");
  inputURLs.add(
      "/opt/cloudera/parcels/CDH-5.0.0-0.cdh5b2.p0.27/lib/hbase/hbase-protocol-0.95.0.jar");
  inputURLs.add(
      "/home/cloudera/.m2/repository/org/apache/hbase/hbase-protocol/0.95.1-hadoop1/"
          + "hbase-protocol-0.95.1-hadoop1.jar");
  inputURLs.add("/usr/lib/hbase/lib/hbase-hadoop2-compat-0.96.1.2.0.11.0-1-hadoop2.jar");
  inputURLs.add("/usr/lib/hbase/lib/hbase-thrift-0.96.1.2.0.11.0-1-hadoop2.jar");
  inputURLs.add("/usr/lib/hadoop/hadoop-common-2.2.0.2.0.11.0-1-tests.jar");
  // Auxiliary jars passed to the setter alongside the scanned URLs.
  List<String> auxJarsURLs = Lists.newArrayList();
  auxJarsURLs.add(
      "/hadoop/hadoop/nm-local-dir/usercache/cdap/appcache/org.ow2.asm.asm-all-4.0.jar");
  auxJarsURLs.add(
      "/hadoop/hadoop/nm-local-dir/usercache/cdap/appcache/co.cask.cdap.common-2.4.0-SNAPSHOT.jar");
  HiveConf hiveConf = new HiveConf();
  LocalMapreduceClasspathSetter classpathSetter =
      new LocalMapreduceClasspathSetter(
          hiveConf, TEMP_FOLDER.newFolder().getAbsolutePath(), auxJarsURLs);
  for (String url : inputURLs) {
    classpathSetter.accept(url);
  }
  // Only the three hbase-protocol jars should have been collected.
  Assert.assertEquals(
      ImmutableList.of(
          "/usr/lib/hbase/lib/hbase-protocol-0.96.1.2.0.11.0-1-hadoop2.jar",
          "/opt/cloudera/parcels/CDH-5.0.0-0.cdh5b2.p0.27/lib/hbase/"
              + "hbase-protocol-0.95.0.jar",
          "/home/cloudera/.m2/repository/org/apache/hbase/hbase-protocol/"
              + "0.95.1-hadoop1/hbase-protocol-0.95.1-hadoop1.jar"),
      ImmutableList.copyOf(classpathSetter.getHbaseProtocolJarPaths()));
  classpathSetter.setupClasspathScript();
  // The generated hadoop-bin script must match the expected fixture and be
  // executable.
  String newHadoopBin = new HiveConf().get(HiveConf.ConfVars.HADOOPBIN.toString());
  Assert.assertEquals(
      generatedHadoopBinMulti,
      Joiner.on('\n').join(Files.readLines(new File(newHadoopBin), Charsets.UTF_8)));
  Assert.assertTrue(new File(newHadoopBin).canExecute());
  System.clearProperty(HiveConf.ConfVars.HADOOPBIN.toString());
}
/** Un-initialization. */
@Override
public void destroy() throws Exception {
  log.debug("TomcatVHostLoader un-init");
  // Stop and detach every StandardContext child before touching the host.
  Container[] children = host.findChildren();
  for (Container c : children) {
    if (c instanceof StandardContext) {
      try {
        ((StandardContext) c).stop();
        host.removeChild(c);
      } catch (Exception e) {
        log.error("Could not stop context: {}", c.getName(), e);
      }
    }
  }
  // remove system prop
  // The property key is "<name>[_<domain with dots escaped>].webapp.root",
  // mirroring how it was registered at load time.
  String propertyPrefix = name;
  if (domain != null) {
    propertyPrefix += '_' + domain.replace('.', '_');
  }
  System.clearProperty(propertyPrefix + ".webapp.root");
  // stop the host
  try {
    ((StandardHost) host).stop();
  } catch (LifecycleException e) {
    log.error("Could not stop host: {}", host.getName(), e);
  }
  // remove host
  engine.removeChild(host);
  // unregister jmx
  unregisterJMX();
}
/**
 * Performs a rolling upgrade of the given cluster members to {@code version}:
 * each member is shut down and replaced with a freshly started instance while
 * the override-version system property is in effect. The property is always
 * cleared afterwards.
 */
public static void upgradeClusterMembers(
    TestHazelcastInstanceFactory factory,
    final HazelcastInstance[] membersToUpgrade,
    MemberVersion version,
    Config config,
    boolean assertClusterSize) {
  try {
    // Force newly started instances to report the target version.
    System.setProperty(HAZELCAST_INTERNAL_OVERRIDE_VERSION, version.toString());
    // upgrade one by one each member of the cluster to the next version
    for (int i = 0; i < membersToUpgrade.length; i++) {
      membersToUpgrade[i].shutdown();
      // Wait for partition safety between each shutdown/restart step.
      waitAllForSafeState(membersToUpgrade);
      // if new node's version is incompatible, then node startup will fail with
      // IllegalStateException
      membersToUpgrade[i] = factory.newHazelcastInstance(config);
      waitAllForSafeState(membersToUpgrade);
      if (assertClusterSize) {
        // assert all members are in the cluster
        assertTrueEventually(
            new AssertTask() {
              @Override
              public void run() throws Exception {
                assertEquals(
                    membersToUpgrade.length,
                    membersToUpgrade[0].getCluster().getMembers().size());
              }
            },
            30);
      }
    }
  } finally {
    // Never leak the version override to subsequent tests.
    System.clearProperty(HAZELCAST_INTERNAL_OVERRIDE_VERSION);
  }
}