/**
 * Method to fetch table data.
 *
 * @param table    table name
 * @param database database name
 * @return list of rows, with columns comma-separated
 * @throws Exception if any error occurs
 */
private List<String> getTableData(String table, String database) throws Exception {
  HiveConf conf = new HiveConf();
  conf.addResource("hive-site.xml");
  ArrayList<String> results = new ArrayList<String>();
  ArrayList<String> temp = new ArrayList<String>();
  Hive hive = Hive.get(conf);
  org.apache.hadoop.hive.ql.metadata.Table tbl = hive.getTable(database, table);
  FetchWork work;
  if (!tbl.getPartCols().isEmpty()) {
    // Partitioned table: build a FetchWork over every partition location.
    List<Partition> partitions = hive.getPartitions(tbl);
    List<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();
    List<String> partLocs = new ArrayList<String>();
    for (Partition part : partitions) {
      partLocs.add(part.getLocation());
      partDesc.add(Utilities.getPartitionDesc(part));
    }
    work = new FetchWork(partLocs, partDesc, Utilities.getTableDesc(tbl));
    work.setLimit(100);
  } else {
    // Unpartitioned table: fetch directly from the table's data location.
    work = new FetchWork(tbl.getDataLocation().toString(), Utilities.getTableDesc(tbl));
  }
  FetchTask task = new FetchTask();
  task.setWork(work);
  task.initialize(conf, null, null);
  task.fetch(temp);
  // FetchTask emits tab-delimited rows; convert them to comma-separated.
  for (String str : temp) {
    results.add(str.replace("\t", ","));
  }
  return results;
}
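// Usage sketch (an assumption, not from the original source): how the private
// helper above might be invoked from a sibling method in the same class. The
// table name "web_logs" and database "default" are placeholders.
public void printTableData() throws Exception {
  for (String row : getTableData("web_logs", "default")) {
    System.out.println(row);
  }
}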
public static void main(String[] args) throws Exception {
  HiveConf conf = new HiveConf();
  // Pick up the action configuration file that Oozie materializes for this task.
  conf.addResource(new Path("file:///", System.getProperty("oozie.action.conf.xml")));
  conf.setVar(ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());
  conf.setBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL, true);
  SessionState.start(new CliSessionState(conf));
  // Execute the query (or semicolon-separated queries) passed as the first argument.
  new CliDriver().processLine(args[0]);
}
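// Usage sketch (an assumption, not part of the original code): outside of Oozie,
// the same entry point can be exercised by pointing "oozie.action.conf.xml" at a
// local Hadoop configuration file. Both the path and the query are placeholders.
public static void runLocally() throws Exception {
  System.setProperty("oozie.action.conf.xml", "/tmp/action-conf.xml");
  main(new String[] { "SHOW TABLES;" });
}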
/**
 * Not a unit test - this simply runs a MiniHS2 cluster, which can be used for manual testing.
 */
@Test
public void testRunCluster() throws Exception {
  if (!Boolean.parseBoolean(System.getProperty("miniHS2.run", "false"))) {
    return;
  }
  MiniClusterType clusterType =
      MiniClusterType.valueOf(System.getProperty("miniHS2.clusterType", "MR").toUpperCase());
  String confFilesProperty = System.getProperty("miniHS2.conf", "../../data/conf/hive-site.xml");
  boolean usePortsFromConf =
      Boolean.parseBoolean(System.getProperty("miniHS2.usePortsFromConf", "false"));

  // Load conf files: the first becomes hive-site.xml, the rest are added as resources.
  String[] confFiles = confFilesProperty.split(",");
  int idx;
  for (idx = 0; idx < confFiles.length; ++idx) {
    String confFile = confFiles[idx];
    if (confFile.isEmpty()) {
      continue;
    }
    HiveConf.setHiveSiteLocation(new URL("file://" + new File(confFile).toURI().getPath()));
    break;
  }
  HiveConf conf = new HiveConf();
  conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_RPC_QUERY_PLAN, true);
  for (; idx < confFiles.length; ++idx) {
    String confFile = confFiles[idx];
    if (confFile.isEmpty()) {
      continue;
    }
    conf.addResource(new URL("file://" + new File(confFile).toURI().getPath()));
  }

  miniHS2 = new MiniHS2(conf, clusterType, usePortsFromConf);
  Map<String, String> confOverlay = new HashMap<String, String>();
  miniHS2.start(confOverlay);
  miniHS2.getDFS().getFileSystem().mkdirs(new Path("/apps_staging_dir/anonymous"));
  System.out.println("JDBC URL available at " + miniHS2.getJdbcURL());

  // MiniHS2 cluster is up .. let it run until someone kills the test
  while (true) {
    Thread.sleep(1000);
  }
}
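// Manual-testing sketch (an assumption, not part of the original test): once the
// cluster is up, a standalone JDBC client can connect using the URL the test
// prints. The URL below is a placeholder; substitute the printed one. Requires
// the hive-jdbc driver on the classpath.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class MiniHS2Probe {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver"); // register the Hive JDBC driver
    String url = "jdbc:hive2://localhost:10000/default"; // placeholder JDBC URL
    try (Connection conn = DriverManager.getConnection(url, "anonymous", "");
         Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery("SHOW TABLES")) {
      while (rs.next()) {
        System.out.println(rs.getString(1)); // print each table name
      }
    }
  }
}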