public static void baseSetup() throws Exception {
  MiniDFSShim dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);
  fs = dfs.getFileSystem();
  baseDfsDir = new Path(new Path(fs.getUri()), "/base");
  fs.mkdirs(baseDfsDir);
  warehouseDir = new Path(baseDfsDir, "warehouse");
  fs.mkdirs(warehouseDir);
  conf.setVar(ConfVars.METASTOREWAREHOUSE, warehouseDir.toString());

  // Assuming the tests are run on either the C: or D: drive on Windows
  dataFileDir = conf.get("test.data.files")
      .replace('\\', '/')
      .replace("c:", "")
      .replace("C:", "")
      .replace("D:", "")
      .replace("d:", "");
  dataFilePath = new Path(dataFileDir, "kv1.txt");

  // Set up the scratch directory
  Path scratchDir = new Path(baseDfsDir, "scratchdir");
  conf.setVar(HiveConf.ConfVars.SCRATCHDIR, scratchDir.toString());

  // Set Hive conf vars
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS, true);
  conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");

  int port = MetaStoreUtils.findFreePort();
  MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
  SessionState.start(new CliSessionState(conf));
  driver = new Driver(conf);
  setupDataTable();
}
/**
 * When auto-shipping the Hive tar (for example, when a Hive query or Pig script is submitted
 * via WebHCat), the Hive client is launched on some remote node where Hive has not been
 * installed. We need to pass some properties to that client to make sure it connects to the
 * right Metastore, configures Tez, etc. Here we look for such properties in the Hive config
 * and set a comma-separated list of key-value pairs in {@link #HIVE_PROPS_NAME}. Note that the
 * user may choose to set the same keys in HIVE_PROPS_NAME directly, in which case those values
 * take precedence.
 */
private void handleHiveProperties() {
  HiveConf hiveConf = new HiveConf(); // loads hive-site.xml from the classpath
  List<String> interestingPropNames = Arrays.asList(
      "hive.metastore.uris",
      "hive.metastore.sasl.enabled",
      "hive.metastore.execute.setugi",
      "hive.execution.engine");

  // each item is in "key=value" format
  List<String> webhcatHiveProps = new ArrayList<String>(hiveProps());
  for (String interestingPropName : interestingPropNames) {
    String value = hiveConf.get(interestingPropName);
    if (value != null) {
      boolean found = false;
      for (String whProp : webhcatHiveProps) {
        if (whProp.startsWith(interestingPropName + "=")) {
          found = true;
          break;
        }
      }
      if (!found) {
        webhcatHiveProps.add(interestingPropName + "=" + value);
      }
    }
  }

  StringBuilder hiveProps = new StringBuilder();
  for (String whProp : webhcatHiveProps) {
    // make sure to escape the separator char in prop values
    hiveProps.append(hiveProps.length() > 0 ? "," : "").append(StringUtils.escapeString(whProp));
  }
  set(HIVE_PROPS_NAME, hiveProps.toString());
}
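// A minimal standalone sketch (not part of the original source) of what the resulting
// HIVE_PROPS_NAME value looks like: org.apache.hadoop.util.StringUtils.escapeString
// backslash-escapes commas, so a value that itself contains commas survives the later
// comma-split on the remote node. Property values below are made up.
import org.apache.hadoop.util.StringUtils;

public class HivePropsFormatDemo {
  public static void main(String[] args) {
    String[] props = {
        "hive.metastore.uris=thrift://ms-host:9083", // hypothetical metastore address
        "hive.execution.engine=tez"
    };
    StringBuilder sb = new StringBuilder();
    for (String p : props) {
      sb.append(sb.length() > 0 ? "," : "").append(StringUtils.escapeString(p));
    }
    // prints: hive.metastore.uris=thrift://ms-host:9083,hive.execution.engine=tez
    System.out.println(sb);
  }
}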
private CliSessionState startSessionState() throws IOException {
  HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
      "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");

  String execEngine = conf.get("hive.execution.engine");
  conf.set("hive.execution.engine", "mr");
  CliSessionState ss = new CliSessionState(conf);
  assert ss != null;
  ss.in = System.in;
  ss.out = System.out;
  ss.err = System.out;

  SessionState oldSs = SessionState.get();
  if (oldSs != null && clusterType == MiniClusterType.tez) {
    oldSs.close();
  }
  if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
    oldSs.out.close();
  }
  SessionState.start(ss);

  isSessionStateStarted = true;
  conf.set("hive.execution.engine", execEngine);
  return ss;
}
public void init() throws Exception {
  testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
  String execEngine = conf.get("hive.execution.engine");
  conf.set("hive.execution.engine", "mr");
  SessionState.start(conf);
  conf.set("hive.execution.engine", execEngine);
  db = Hive.get(conf);
  pd = new ParseDriver();
  sem = new SemanticAnalyzer(conf);
}
/**
 * Constructs the JobTracker URL based on the job ID.
 *
 * @param jobID the Hadoop job ID
 * @param conf the Hive configuration to read the JobTracker addresses from
 * @return the URL of the job details page for the given job
 * @see org.apache.hadoop.hive.hwi#getJobTrackerURL(String)
 */
public static String getJobTrackerURL(String jobID, HiveConf conf) {
  String jt = conf.get("mapred.job.tracker");
  String jth = conf.get("mapred.job.tracker.http.address");
  String[] jtparts;
  String[] jthttpParts;
  if (jt.equalsIgnoreCase("local")) {
    jtparts = new String[2];
    jtparts[0] = "local";
    jtparts[1] = "";
  } else {
    jtparts = jt.split(":");
  }
  if (jth.contains(":")) {
    jthttpParts = jth.split(":");
  } else {
    jthttpParts = new String[2];
    jthttpParts[0] = jth;
    jthttpParts[1] = "";
  }
  return jtparts[0] + ":" + jthttpParts[1] + "/jobdetails.jsp?jobid=" + jobID + "&refresh=30";
}
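// A hedged usage sketch (host names, ports, and job ID below are made up): the method stitches
// the JobTracker host from mapred.job.tracker together with the HTTP port from
// mapred.job.tracker.http.address.
HiveConf demoConf = new HiveConf();
demoConf.set("mapred.job.tracker", "jt-host:8021");
demoConf.set("mapred.job.tracker.http.address", "0.0.0.0:50030");
String url = getJobTrackerURL("job_201501010000_0001", demoConf);
// url == "jt-host:50030/jobdetails.jsp?jobid=job_201501010000_0001&refresh=30"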
public static HiveAuthzConf loadAuthzConf(HiveConf hiveConf) {
  boolean deprecatedConfigFile = false;
  HiveAuthzConf newAuthzConf = null;
  String hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
  if (hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
    hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_ACCESS_CONF_URL);
    deprecatedConfigFile = true;
  }
  if (hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
    throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL
        + " value '" + hiveAuthzConf + "' is invalid.");
  }

  try {
    newAuthzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
  } catch (MalformedURLException e) {
    if (deprecatedConfigFile) {
      throw new IllegalArgumentException("Configuration key "
          + HiveAuthzConf.HIVE_ACCESS_CONF_URL + " specifies a malformed URL '"
          + hiveAuthzConf + "'", e);
    } else {
      throw new IllegalArgumentException("Configuration key "
          + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " specifies a malformed URL '"
          + hiveAuthzConf + "'", e);
    }
  }
  return newAuthzConf;
}
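// A hedged usage sketch (the file path is illustrative): loadAuthzConf expects the config
// value to be a URL, so a local sentry-site.xml must be given with a file:// scheme.
HiveConf hiveConf = new HiveConf();
hiveConf.set(HiveAuthzConf.HIVE_SENTRY_CONF_URL, "file:///etc/sentry/conf/sentry-site.xml");
HiveAuthzConf authzConf = loadAuthzConf(hiveConf);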
/**
 * Given a Hive Configuration object, generates a command line fragment for passing such
 * configuration information to ExecDriver.
 */
public static String generateCmdLine(HiveConf hconf, Context ctx) throws IOException {
  HiveConf tempConf = new HiveConf();
  Path hConfFilePath = new Path(ctx.getLocalTmpPath(), JOBCONF_FILENAME);
  OutputStream out = null;

  Properties deltaP = hconf.getChangedProperties();
  boolean hadoopLocalMode = ShimLoader.getHadoopShims().isLocalMode(hconf);
  String hadoopSysDir = "mapred.system.dir";
  String hadoopWorkDir = "mapred.local.dir";

  for (Object one : deltaP.keySet()) {
    String oneProp = (String) one;
    if (hadoopLocalMode && (oneProp.equals(hadoopSysDir) || oneProp.equals(hadoopWorkDir))) {
      continue;
    }
    tempConf.set(oneProp, hconf.get(oneProp));
  }

  // Multiple concurrent local mode job submissions can cause collisions in
  // working dirs and system dirs.
  // The workaround is to rename the MapReduce working dir to a temp dir in such cases.
  if (hadoopLocalMode) {
    tempConf.set(hadoopSysDir, hconf.get(hadoopSysDir) + "/" + Utilities.randGen.nextInt());
    tempConf.set(hadoopWorkDir, hconf.get(hadoopWorkDir) + "/" + Utilities.randGen.nextInt());
  }

  try {
    out = FileSystem.getLocal(hconf).create(hConfFilePath);
    tempConf.writeXml(out);
  } finally {
    if (out != null) {
      out.close();
    }
  }

  return " -jobconffile " + hConfFilePath.toString();
}
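// A hedged usage sketch (hadoopExec and execJar are placeholder variables, and the path in the
// comment is illustrative): the returned fragment is appended to the command line that launches
// ExecDriver in a child JVM, which re-reads the serialized job conf from the local file.
String cmdFragment = generateCmdLine(hconf, ctx);
// e.g. " -jobconffile file:/tmp/<user>/<session>/jobconf.xml"
String hadoopCmd =
    hadoopExec + " jar " + execJar + " org.apache.hadoop.hive.ql.exec.mr.ExecDriver" + cmdFragment;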
/**
 * If the authorization mode is v2, then pass the config through the authorizer so that it can
 * apply any security configuration changes.
 */
public void applyAuthorizationPolicy() throws HiveException {
  if (!isAuthorizationModeV2()) {
    // auth v1 interface does not have this functionality
    return;
  }

  // avoid processing the same config multiple times; check the marker
  if (conf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) {
    return;
  }
  authorizerV2.applyAuthorizationConfigPolicy(conf);
  // set a marker that this conf has been processed
  conf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString());
}
public HiveSchemaTool(String hiveHome, HiveConf hiveConf, String dbType)
    throws HiveMetaException {
  if (hiveHome == null || hiveHome.isEmpty()) {
    throw new HiveMetaException("No Hive home directory provided");
  }
  this.hiveConf = hiveConf;
  this.dbType = dbType;
  this.metaStoreSchemaInfo = new MetaStoreSchemaInfo(hiveHome, hiveConf, dbType);
  userName = hiveConf.get(ConfVars.METASTORE_CONNECTION_USER_NAME.varname);
  try {
    passWord = ShimLoader.getHadoopShims()
        .getPassword(hiveConf, HiveConf.ConfVars.METASTOREPWD.varname);
  } catch (IOException err) {
    throw new HiveMetaException("Error getting metastore password", err);
  }
}
@Override
public void run(final HookContext hookContext) throws Exception {
  try {
    // clone to avoid concurrent access
    final HiveConf conf = new HiveConf(hookContext.getConf());
    final HiveEventContext event = new HiveEventContext();
    event.setInputs(hookContext.getInputs());
    event.setOutputs(hookContext.getOutputs());
    event.setJsonPlan(getQueryPlan(hookContext.getConf(), hookContext.getQueryPlan()));
    event.setHookType(hookContext.getHookType());
    event.setUgi(hookContext.getUgi());
    event.setUser(getUser(hookContext.getUserName()));
    event.setOperation(OPERATION_MAP.get(hookContext.getOperationName()));
    event.setQueryId(hookContext.getQueryPlan().getQueryId());
    event.setQueryStr(hookContext.getQueryPlan().getQueryStr());
    event.setQueryStartTime(hookContext.getQueryPlan().getQueryStartTime());
    event.setQueryType(hookContext.getQueryPlan().getQueryPlan().getQueryType());

    boolean sync = conf.get(CONF_SYNC, "false").equals("true");
    if (sync) {
      fireAndForget(event);
    } else {
      executor.submit(new Runnable() {
        @Override
        public void run() {
          try {
            fireAndForget(event);
          } catch (Throwable e) {
            LOG.error("Atlas hook failed due to error ", e);
          }
        }
      });
    }
  } catch (Throwable t) {
    LOG.error("Submitting to thread pool failed due to error ", t);
  }
}
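// A hedged configuration sketch: the hook dispatches asynchronously through the executor
// unless CONF_SYNC is "true". CONF_SYNC is defined elsewhere in the hook; assuming it resolves
// to "atlas.hook.hive.synchronous" (treat the literal key as an assumption here):
HiveConf hookConf = new HiveConf();
hookConf.set("atlas.hook.hive.synchronous", "true"); // fire events on the hook thread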
TimelineEntity createPreHookEvent(String queryId, String query, JSONObject explainPlan,
    long startTime, String user, String requestuser, int numMrJobs, int numTezJobs, String opId,
    String clientIpAddress, String hiveInstanceAddress, String hiveInstanceType,
    String sessionID, String logID, String threadId, String executionMode,
    List<String> tablesRead, List<String> tablesWritten, HiveConf conf) throws Exception {

  JSONObject queryObj = new JSONObject(new LinkedHashMap<>());
  queryObj.put("queryText", query);
  queryObj.put("queryPlan", explainPlan);

  LOG.info("Received pre-hook notification for :" + queryId);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Otherinfo: " + queryObj.toString());
    LOG.debug("Operation id: <" + opId + ">");
  }

  conf.stripHiddenConfigurations(conf);
  Map<String, String> confMap = new HashMap<String, String>();
  for (Map.Entry<String, String> setting : conf) {
    confMap.put(setting.getKey(), setting.getValue());
  }
  JSONObject confObj = new JSONObject((Map) confMap);

  TimelineEntity atsEntity = new TimelineEntity();
  atsEntity.setEntityId(queryId);
  atsEntity.setEntityType(EntityTypes.HIVE_QUERY_ID.name());
  atsEntity.addPrimaryFilter(PrimaryFilterTypes.user.name(), user);
  atsEntity.addPrimaryFilter(PrimaryFilterTypes.requestuser.name(), requestuser);
  atsEntity.addPrimaryFilter(PrimaryFilterTypes.executionmode.name(), executionMode);
  atsEntity.addPrimaryFilter(PrimaryFilterTypes.queue.name(),
      conf.get("mapreduce.job.queuename"));

  if (opId != null) {
    atsEntity.addPrimaryFilter(PrimaryFilterTypes.operationid.name(), opId);
  }

  for (String tabName : tablesRead) {
    atsEntity.addPrimaryFilter(PrimaryFilterTypes.tablesread.name(), tabName);
  }
  for (String tabName : tablesWritten) {
    atsEntity.addPrimaryFilter(PrimaryFilterTypes.tableswritten.name(), tabName);
  }

  TimelineEvent startEvt = new TimelineEvent();
  startEvt.setEventType(EventTypes.QUERY_SUBMITTED.name());
  startEvt.setTimestamp(startTime);
  atsEntity.addEvent(startEvt);

  atsEntity.addOtherInfo(OtherInfoTypes.QUERY.name(), queryObj.toString());
  atsEntity.addOtherInfo(OtherInfoTypes.TEZ.name(), numTezJobs > 0);
  atsEntity.addOtherInfo(OtherInfoTypes.MAPRED.name(), numMrJobs > 0);
  atsEntity.addOtherInfo(OtherInfoTypes.SESSION_ID.name(), sessionID);
  atsEntity.addOtherInfo(OtherInfoTypes.INVOKER_INFO.name(), logID);
  atsEntity.addOtherInfo(OtherInfoTypes.THREAD_NAME.name(), threadId);
  atsEntity.addOtherInfo(OtherInfoTypes.VERSION.name(), VERSION);
  if (clientIpAddress != null) {
    atsEntity.addOtherInfo(OtherInfoTypes.CLIENT_IP_ADDRESS.name(), clientIpAddress);
  }
  atsEntity.addOtherInfo(OtherInfoTypes.HIVE_ADDRESS.name(), hiveInstanceAddress);
  atsEntity.addOtherInfo(OtherInfoTypes.HIVE_INSTANCE_TYPE.name(), hiveInstanceType);
  atsEntity.addOtherInfo(OtherInfoTypes.CONF.name(), confObj.toString());

  return atsEntity;
}
public HiveTestUtil(String outDir, String logDir, MiniClusterType clusterType, String confDir,
    String hadoopVer) throws Exception {
  this.outDir = outDir;
  this.logDir = logDir;
  if (confDir != null && !confDir.isEmpty()) {
    HiveConf.setHiveSiteLocation(
        new URL("file://" + new File(confDir).toURI().getPath() + "/hive-site.xml"));
    LOG.info("Setting hive-site: " + HiveConf.getHiveSiteLocation());
  }
  conf = new HiveConf();
  String tmpBaseDir = System.getProperty("test.tmp.dir");
  if (tmpBaseDir == null || tmpBaseDir.isEmpty()) {
    tmpBaseDir = System.getProperty("java.io.tmpdir");
  }
  String metaStoreURL =
      "jdbc:derby:" + tmpBaseDir + File.separator + "metastore_dbtest;" + "create=true";
  conf.set(ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);
  System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);

  // set where derby logs
  File derbyLogFile = new File(tmpBaseDir + "/derby.log");
  derbyLogFile.createNewFile();
  System.setProperty("derby.stream.error.file", derbyLogFile.getPath());

  this.hadoopVer = getHadoopMainVersion(hadoopVer);
  qMap = new TreeMap<String, String>();
  qSkipSet = new HashSet<String>();
  qSortSet = new HashSet<String>();
  qSortQuerySet = new HashSet<String>();
  qHashQuerySet = new HashSet<String>();
  qSortNHashQuerySet = new HashSet<String>();
  qJavaVersionSpecificOutput = new HashSet<String>();
  this.clusterType = clusterType;

  // Use a random UUID for the dfs cluster
  System.setProperty("test.build.data", "target/test-data/hive-" + UUID.randomUUID().toString());

  HadoopShims shims = ShimLoader.getHadoopShims();
  int numberOfDataNodes = 4;

  if (clusterType != MiniClusterType.none) {
    dfs = shims.getMiniDfs(conf, numberOfDataNodes, true, null);
    FileSystem fs = dfs.getFileSystem();
    String uriString = WindowsPathUtil.getHdfsUriString(fs.getUri().toString());
    if (clusterType == MiniClusterType.tez) {
      mr = shims.getMiniTezCluster(conf, 4, uriString, 1);
    } else {
      mr = shims.getMiniMrCluster(conf, 4, uriString, 1);
    }
  }

  initConf();

  // Use the current directory if it is not specified
  String dataDir = conf.get("test.data.files");
  if (dataDir == null) {
    dataDir = new File(".").getAbsolutePath() + "/data/files";
  }
  testFiles = dataDir;

  // Use the current directory if it is not specified
  String scriptsDir = conf.get("test.data.scripts");
  if (scriptsDir == null) {
    scriptsDir = new File(".").getAbsolutePath() + "/data/scripts";
  }
  if (!initScript.isEmpty()) {
    this.initScript = scriptsDir + "/" + initScript;
  }
  if (!cleanupScript.isEmpty()) {
    this.cleanupScript = scriptsDir + "/" + cleanupScript;
  }

  overWrite = "true".equalsIgnoreCase(System.getProperty("test.output.overwrite"));

  setup = new HiveTestSetup();
  setup.preTest(conf);
  init();
}
public class TestSSL {
  private static final Logger LOG = LoggerFactory.getLogger(TestSSL.class);
  private static final String KEY_STORE_NAME = "keystore.jks";
  private static final String TRUST_STORE_NAME = "truststore.jks";
  private static final String KEY_STORE_PASSWORD = "******"; // redacted
  private static final String JAVA_TRUST_STORE_PROP = "javax.net.ssl.trustStore";
  private static final String JAVA_TRUST_STORE_PASS_PROP = "javax.net.ssl.trustStorePassword";
  private static final String HS2_BINARY_MODE = "binary";
  private static final String HS2_HTTP_MODE = "http";
  private static final String HS2_HTTP_ENDPOINT = "cliservice";
  private static final String HS2_BINARY_AUTH_MODE = "NONE";

  private MiniHS2 miniHS2 = null;
  private static HiveConf conf = new HiveConf();
  private Connection hs2Conn = null;
  private String dataFileDir = conf.get("test.data.files");
  private Map<String, String> confOverlay;
  private final String SSL_CONN_PARAMS = ";ssl=true;sslTrustStore="
      + URLEncoder.encode(dataFileDir + File.separator + TRUST_STORE_NAME)
      + ";trustStorePassword=" + KEY_STORE_PASSWORD;

  @Before
  public void setUp() throws Exception {
    if (!System.getProperty("test.data.files", "").isEmpty()) {
      dataFileDir = System.getProperty("test.data.files");
    }
    dataFileDir = dataFileDir.replace('\\', '/').replace("c:", "");
    miniHS2 = new MiniHS2(conf);
    confOverlay = new HashMap<String, String>();
  }

  @After
  public void tearDown() throws Exception {
    if (hs2Conn != null) {
      hs2Conn.close();
    }
    if (miniHS2 != null && miniHS2.isStarted()) {
      miniHS2.stop();
    }
    System.clearProperty(JAVA_TRUST_STORE_PROP);
    System.clearProperty(JAVA_TRUST_STORE_PASS_PROP);
  }

  private int execCommand(String cmd) throws Exception {
    int exitCode;
    try {
      String output = Shell.execCommand("bash", "-c", cmd);
      LOG.info("Output from '" + cmd + "': " + output);
      exitCode = 0;
    } catch (Shell.ExitCodeException e) {
      exitCode = e.getExitCode();
      LOG.info("Error executing '" + cmd + "', exitCode = " + exitCode, e);
    }
    return exitCode;
  }

  /** Tests to ensure SSLv2 and SSLv3 are disabled. */
  @Test
  public void testSSLVersion() throws Exception {
    // we need openssl
    Assume.assumeTrue(execCommand("which openssl") == 0);
    // we depend on linux openssl exit codes
    Assume.assumeTrue(System.getProperty("os.name").toLowerCase().contains("linux"));

    setSslConfOverlay(confOverlay);
    // Test in binary mode
    setBinaryConfOverlay(confOverlay);
    // Start HS2 with SSL
    miniHS2.start(confOverlay);

    // make SSL connection
    hs2Conn = DriverManager.getConnection(
        miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" + dataFileDir + File.separator
            + TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD,
        System.getProperty("user.name"), "bar");
    hs2Conn.close();
    Assert.assertEquals("Expected exit code of 1", 1,
        execCommand("openssl s_client -connect " + miniHS2.getHost() + ":"
            + miniHS2.getBinaryPort() + " -ssl2 < /dev/null"));
    Assert.assertEquals("Expected exit code of 1", 1,
        execCommand("openssl s_client -connect " + miniHS2.getHost() + ":"
            + miniHS2.getBinaryPort() + " -ssl3 < /dev/null"));
    miniHS2.stop();

    // Test in http mode
    setHttpConfOverlay(confOverlay);
    miniHS2.start(confOverlay);
    // make SSL connection
    try {
      hs2Conn = DriverManager.getConnection(
          miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" + dataFileDir + File.separator
              + TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD,
          System.getProperty("user.name"), "bar");
      Assert.fail("Expected SQLException during connect");
    } catch (SQLException e) {
      LOG.info("Expected exception: " + e, e);
      Assert.assertEquals("08S01", e.getSQLState().trim());
      Throwable cause = e.getCause();
      Assert.assertNotNull(cause);
      while (cause.getCause() != null) {
        cause = cause.getCause();
      }
      Assert.assertEquals("org.apache.http.NoHttpResponseException", cause.getClass().getName());
      Assert.assertTrue(cause.getMessage().contains("failed to respond"));
    }
    miniHS2.stop();
  }

  /**
   * Test that an SSL client fails against a non-SSL server.
   *
   * @throws Exception
   */
  @Test
  public void testInvalidConfig() throws Exception {
    clearSslConfOverlay(confOverlay);
    // Test in binary mode
    setBinaryConfOverlay(confOverlay);
    miniHS2.start(confOverlay);
    DriverManager.setLoginTimeout(4);
    try {
      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
          System.getProperty("user.name"), "bar");
      fail("SSL connection should fail with NON-SSL server");
    } catch (SQLException e) {
      // expected error
      assertEquals("08S01", e.getSQLState().trim());
    }

    System.setProperty(JAVA_TRUST_STORE_PROP, dataFileDir + File.separator + TRUST_STORE_NAME);
    System.setProperty(JAVA_TRUST_STORE_PASS_PROP, KEY_STORE_PASSWORD);
    try {
      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true",
          System.getProperty("user.name"), "bar");
      fail("SSL connection should fail with NON-SSL server");
    } catch (SQLException e) {
      // expected error
      assertEquals("08S01", e.getSQLState().trim());
    }
    miniHS2.stop();

    // Test in http mode with ssl properties specified in url
    System.clearProperty(JAVA_TRUST_STORE_PROP);
    System.clearProperty(JAVA_TRUST_STORE_PASS_PROP);
    setHttpConfOverlay(confOverlay);
    miniHS2.start(confOverlay);
    try {
      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
          System.getProperty("user.name"), "bar");
      fail("SSL connection should fail with NON-SSL server");
    } catch (SQLException e) {
      // expected error
      assertEquals("08S01", e.getSQLState().trim());
    }
  }

  /**
   * Test that a non-SSL client fails against an SSL server.
   *
   * @throws Exception
   */
  @Test
  public void testConnectionMismatch() throws Exception {
    setSslConfOverlay(confOverlay);
    // Test in binary mode
    setBinaryConfOverlay(confOverlay);
    // Start HS2 with SSL
    miniHS2.start(confOverlay);
    try {
      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL(),
          System.getProperty("user.name"), "bar");
      fail("NON SSL connection should fail with SSL server");
    } catch (SQLException e) {
      // expected error
      assertEquals("08S01", e.getSQLState().trim());
    }

    try {
      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=false",
          System.getProperty("user.name"), "bar");
      fail("NON SSL connection should fail with SSL server");
    } catch (SQLException e) {
      // expected error
      assertEquals("08S01", e.getSQLState().trim());
    }
    miniHS2.stop();

    // Test in http mode
    setHttpConfOverlay(confOverlay);
    miniHS2.start(confOverlay);
    try {
      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", ";ssl=false"),
          System.getProperty("user.name"), "bar");
      fail("NON SSL connection should fail with SSL server");
    } catch (SQLException e) {
      // expected error
      assertEquals("08S01", e.getSQLState().trim());
    }
  }

  /**
   * Test an SSL client connection to an SSL server via the JDBC URL.
   *
   * @throws Exception
   */
  @Test
  public void testSSLConnectionWithURL() throws Exception {
    setSslConfOverlay(confOverlay);
    // Test in binary mode
    setBinaryConfOverlay(confOverlay);
    // Start HS2 with SSL
    miniHS2.start(confOverlay);
    // make SSL connection
    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");
    hs2Conn.close();
    miniHS2.stop();

    // Test in http mode
    setHttpConfOverlay(confOverlay);
    miniHS2.start(confOverlay);
    // make SSL connection
    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");
    hs2Conn.close();
  }

  /**
   * Test an SSL client connection to an SSL server via system properties.
   *
   * @throws Exception
   */
  @Test
  public void testSSLConnectionWithProperty() throws Exception {
    setSslConfOverlay(confOverlay);
    // Test in binary mode
    setBinaryConfOverlay(confOverlay);
    // Start HS2 with SSL
    miniHS2.start(confOverlay);
    System.setProperty(JAVA_TRUST_STORE_PROP, dataFileDir + File.separator + TRUST_STORE_NAME);
    System.setProperty(JAVA_TRUST_STORE_PASS_PROP, KEY_STORE_PASSWORD);
    // make SSL connection
    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true",
        System.getProperty("user.name"), "bar");
    hs2Conn.close();
    miniHS2.stop();

    // Test in http mode
    setHttpConfOverlay(confOverlay);
    miniHS2.start(confOverlay);
    // make SSL connection
    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");
    hs2Conn.close();
  }

  /**
   * Start HS2 in SSL mode, open an SSL connection and fetch data.
   *
   * @throws Exception
   */
  @Test
  public void testSSLFetch() throws Exception {
    setSslConfOverlay(confOverlay);
    // Test in binary mode
    setBinaryConfOverlay(confOverlay);
    // Start HS2 with SSL
    miniHS2.start(confOverlay);

    String tableName = "sslTab";
    Path dataFilePath = new Path(dataFileDir, "kv1.txt");

    // make SSL connection
    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");

    // Set up test data
    setupTestTableWithData(tableName, dataFilePath, hs2Conn);

    Statement stmt = hs2Conn.createStatement();
    ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName);
    int rowCount = 0;
    while (res.next()) {
      ++rowCount;
      assertEquals("val_" + res.getInt(1), res.getString(2));
    }
    // read result over SSL
    assertEquals(500, rowCount);
    hs2Conn.close();
  }

  /**
   * Start HS2 in HTTP mode with SSL enabled, open an SSL connection and fetch data.
   *
   * @throws Exception
   */
  @Test
  public void testSSLFetchHttp() throws Exception {
    setSslConfOverlay(confOverlay);
    // Test in http mode
    setHttpConfOverlay(confOverlay);
    miniHS2.start(confOverlay);

    String tableName = "sslTab";
    Path dataFilePath = new Path(dataFileDir, "kv1.txt");

    // make SSL connection
    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");

    // Set up test data
    setupTestTableWithData(tableName, dataFilePath, hs2Conn);
    Statement stmt = hs2Conn.createStatement();
    ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName);
    int rowCount = 0;
    while (res.next()) {
      ++rowCount;
      assertEquals("val_" + res.getInt(1), res.getString(2));
    }
    // read result over SSL
    assertEquals(500, rowCount);
    hs2Conn.close();
  }

  private void setupTestTableWithData(String tableName, Path dataFilePath, Connection hs2Conn)
      throws Exception {
    Statement stmt = hs2Conn.createStatement();
    stmt.execute("set hive.support.concurrency = false");
    stmt.execute("drop table if exists " + tableName);
    stmt.execute("create table " + tableName
        + " (under_col int comment 'the under column', value string)");
    // load data
    stmt.execute(
        "load data local inpath '" + dataFilePath.toString() + "' into table " + tableName);
    stmt.close();
  }

  private void setSslConfOverlay(Map<String, String> confOverlay) {
    confOverlay.put(ConfVars.HIVE_SERVER2_USE_SSL.varname, "true");
    confOverlay.put(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname,
        dataFileDir + File.separator + KEY_STORE_NAME);
    confOverlay.put(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname, KEY_STORE_PASSWORD);
  }

  private void clearSslConfOverlay(Map<String, String> confOverlay) {
    confOverlay.put(ConfVars.HIVE_SERVER2_USE_SSL.varname, "false");
  }

  private void setHttpConfOverlay(Map<String, String> confOverlay) {
    confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, HS2_HTTP_MODE);
    confOverlay.put(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname, HS2_HTTP_ENDPOINT);
    confOverlay.put(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "true");
  }

  private void setBinaryConfOverlay(Map<String, String> confOverlay) {
    confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, HS2_BINARY_MODE);
    confOverlay.put(ConfVars.HIVE_SERVER2_AUTHENTICATION.varname, HS2_BINARY_AUTH_MODE);
    confOverlay.put(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "true");
  }
}
/**
 * Opens a session and creates a table for the test cases to use.
 *
 * @throws Exception
 */
@Before
public void setUp() throws Exception {
  dataFile = new File(hiveConf.get("test.data.files"), "kv1.txt");
  client = miniHS2.getServiceClient();
  sessionHandle = setupSession();
}
/**
 * Gets a config property.
 *
 * @param propertyKey the name of the property to look up
 * @return the property value, or null if it is not set
 */
public String getConfProperty(String propertyKey) {
  return hiveConf.get(propertyKey);
}