/**
 * Not a unit test - this simply runs a MiniHS2 cluster, which can be used for manual testing.
 */
@Test
public void testRunCluster() throws Exception {
  if (!Boolean.parseBoolean(System.getProperty("miniHS2.run", "false"))) {
    return;
  }

  MiniClusterType clusterType =
      MiniClusterType.valueOf(System.getProperty("miniHS2.clusterType", "MR").toUpperCase());
  String confFilesProperty = System.getProperty("miniHS2.conf", "../../data/conf/hive-site.xml");
  boolean usePortsFromConf =
      Boolean.parseBoolean(System.getProperty("miniHS2.usePortsFromConf", "false"));

  // Load conf files: the first non-empty entry becomes the hive-site location;
  // it and any remaining entries are then layered on as additional resources.
  String[] confFiles = confFilesProperty.split(",");
  int idx;
  for (idx = 0; idx < confFiles.length; ++idx) {
    String confFile = confFiles[idx];
    if (confFile.isEmpty()) {
      continue;
    }
    HiveConf.setHiveSiteLocation(new File(confFile).toURI().toURL());
    break;
  }
  HiveConf conf = new HiveConf();
  conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  conf.setBoolVar(ConfVars.HIVE_RPC_QUERY_PLAN, true);
  for (; idx < confFiles.length; ++idx) {
    String confFile = confFiles[idx];
    if (confFile.isEmpty()) {
      continue;
    }
    conf.addResource(new File(confFile).toURI().toURL());
  }

  miniHS2 = new MiniHS2(conf, clusterType, usePortsFromConf);
  Map<String, String> confOverlay = new HashMap<String, String>();
  miniHS2.start(confOverlay);
  miniHS2.getDFS().getFileSystem().mkdirs(new Path("/apps_staging_dir/anonymous"));

  System.out.println("JDBC URL available at " + miniHS2.getJdbcURL());
  // MiniHS2 cluster is up .. let it run until someone kills the test
  while (true) {
    Thread.sleep(1000);
  }
}
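// A minimal companion sketch (not part of the original test class): once
// testRunCluster() prints the JDBC URL, any plain JDBC client can talk to the
// running MiniHS2. The class name and default URL below are illustrative
// placeholders - substitute whatever the test printed - and the "anonymous"
// user matches the staging directory created above. Assumes hive-jdbc is on
// the classpath.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class MiniHS2SmokeClient {
  public static void main(String[] args) throws Exception {
    // Hypothetical default; pass the URL printed by testRunCluster() instead.
    String jdbcUrl = args.length > 0 ? args[0] : "jdbc:hive2://localhost:10000/default";
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    try (Connection conn = DriverManager.getConnection(jdbcUrl, "anonymous", "");
        Statement stmt = conn.createStatement();
        ResultSet rs = stmt.executeQuery("SHOW DATABASES")) {
      while (rs.next()) {
        System.out.println(rs.getString(1));
      }
    }
  }
}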
public HiveTestUtil(String outDir, String logDir, MiniClusterType clusterType, String confDir,
    String hadoopVer) throws Exception {
  this.outDir = outDir;
  this.logDir = logDir;
  if (confDir != null && !confDir.isEmpty()) {
    HiveConf.setHiveSiteLocation(new File(confDir, "hive-site.xml").toURI().toURL());
    LOG.info("Setting hive-site: " + HiveConf.getHiveSiteLocation());
  }
  conf = new HiveConf();
  String tmpBaseDir = System.getProperty("test.tmp.dir");
  if (tmpBaseDir == null || tmpBaseDir.isEmpty()) {
    tmpBaseDir = System.getProperty("java.io.tmpdir");
  }
  String metaStoreURL =
      "jdbc:derby:" + tmpBaseDir + File.separator + "metastore_dbtest;" + "create=true";
  conf.set(ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);
  System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);

  // Set where Derby logs, so it does not litter the working directory
  File derbyLogFile = new File(tmpBaseDir + "/derby.log");
  derbyLogFile.createNewFile();
  System.setProperty("derby.stream.error.file", derbyLogFile.getPath());

  this.hadoopVer = getHadoopMainVersion(hadoopVer);
  qMap = new TreeMap<String, String>();
  qSkipSet = new HashSet<String>();
  qSortSet = new HashSet<String>();
  qSortQuerySet = new HashSet<String>();
  qHashQuerySet = new HashSet<String>();
  qSortNHashQuerySet = new HashSet<String>();
  qJavaVersionSpecificOutput = new HashSet<String>();
  this.clusterType = clusterType;

  // Using randomUUID so concurrent runs get isolated dfs cluster data directories
  System.setProperty("test.build.data", "target/test-data/hive-" + UUID.randomUUID().toString());
  HadoopShims shims = ShimLoader.getHadoopShims();
  int numberOfDataNodes = 4;

  if (clusterType != MiniClusterType.none) {
    dfs = shims.getMiniDfs(conf, numberOfDataNodes, true, null);
    FileSystem fs = dfs.getFileSystem();
    String uriString = WindowsPathUtil.getHdfsUriString(fs.getUri().toString());
    if (clusterType == MiniClusterType.tez) {
      mr = shims.getMiniTezCluster(conf, 4, uriString, 1);
    } else {
      mr = shims.getMiniMrCluster(conf, 4, uriString, 1);
    }
  }

  initConf();

  // Use the current directory if it is not specified
  String dataDir = conf.get("test.data.files");
  if (dataDir == null) {
    dataDir = new File(".").getAbsolutePath() + "/data/files";
  }
  testFiles = dataDir;

  // Use the current directory if it is not specified
  String scriptsDir = conf.get("test.data.scripts");
  if (scriptsDir == null) {
    scriptsDir = new File(".").getAbsolutePath() + "/data/scripts";
  }
  if (!initScript.isEmpty()) {
    this.initScript = scriptsDir + "/" + initScript;
  }
  if (!cleanupScript.isEmpty()) {
    this.cleanupScript = scriptsDir + "/" + cleanupScript;
  }

  overWrite = "true".equalsIgnoreCase(System.getProperty("test.output.overwrite"));

  setup = new HiveTestSetup();
  setup.preTest(conf);
  init();
}
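// A minimal usage sketch (hypothetical test class, paths, and version string -
// not from the original source) showing how the constructor above might be
// wired into a JUnit fixture. MiniClusterType.none skips the mini DFS startup
// entirely; MiniClusterType.tez starts a mini Tez cluster, and any other value
// falls through to a mini MR cluster. Assumes MiniClusterType is the enum
// referenced by the constructor signature.
import org.junit.AfterClass;
import org.junit.BeforeClass;

public class HiveTestUtilExampleTest {
  private static HiveTestUtil qt;

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    // "2.0.0" is an illustrative Hadoop version; getHadoopMainVersion()
    // reduces it to a main version internally.
    qt = new HiveTestUtil("target/qfile-out", "target/qfile-logs",
        MiniClusterType.none, null, "2.0.0");
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    // Assumption: HiveTestUtil exposes a shutdown()-style cleanup hook, as the
    // Hive QTestUtil it mirrors does.
    qt.shutdown();
  }
}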