private void materializeResults(Results r, boolean startOver) throws IOException { if (driver.getPlan().getFetchTask() == null) { // This query is never going to return anything. r.has_more = false; r.setData(Collections.<String>emptyList()); r.setColumns(Collections.<String>emptyList()); return; } if (startOver) { // This is totally inappropriately reaching into internals. driver.getPlan().getFetchTask().initialize(hiveConf, driver.getPlan(), null); startRow = 0; } ArrayList<String> v = new ArrayList<String>(); r.setData(v); r.has_more = driver.getResults(v); r.start_row = startRow; startRow += v.size(); r.setColumns(new ArrayList<String>()); try { for (FieldSchema f : driver.getSchema().getFieldSchemas()) { r.addToColumns(f.getName()); } } catch (Exception e) { // An empty partitioned table may not have table description LOG.error("Error getting column names of results.", e); } }
/**
 * Compile the query and extract metadata
 *
 * @param sqlOperationConf configuration used to construct the Driver for this statement
 * @throws HiveSQLException on compile failure, missing schema, or any unexpected error
 */
public void prepare(HiveConf sqlOperationConf) throws HiveSQLException {
  // Mark the operation as running before doing any work; error paths below
  // flip it to ERROR.
  setState(OperationState.RUNNING);
  try {
    driver = new Driver(sqlOperationConf, getParentSession().getUserName());
    // set the operation handle information in Driver, so that thrift API users
    // can use the operation handle they receive, to lookup query information in
    // Yarn ATS
    String guid64 = Base64.encodeBase64URLSafeString(
        getHandle().getHandleIdentifier().toTHandleIdentifier().getGuid()).trim();
    driver.setOperationId(guid64);

    // In Hive server mode, we are not able to retry in the FetchTask
    // case, when calling fetch queries since execute() has returned.
    // For now, we disable the test attempts.
    driver.setTryCount(Integer.MAX_VALUE);

    response = driver.compileAndRespond(statement);
    // Non-zero response code means compilation failed; surface it as a SQL error.
    if (0 != response.getResponseCode()) {
      throw toSQLException("Error while compiling statement", response);
    }

    mResultSchema = driver.getSchema();

    // hasResultSet should be true only if the query has a FetchTask
    // "explain" is an exception for now
    if (driver.getPlan().getFetchTask() != null) {
      // Schema has to be set
      if (mResultSchema == null || !mResultSchema.isSetFieldSchemas()) {
        throw new HiveSQLException("Error while compiling statement".equals("") ? null
            : "Error compiling query: Schema and FieldSchema " +
            "should be set when query plan has a FetchTask");
      }
      resultSchema = new TableSchema(mResultSchema);
      setHasResultSet(true);
    } else {
      setHasResultSet(false);
    }
    // Set hasResultSet true if the plan has ExplainTask
    // TODO explain should use a FetchTask for reading
    for (Task<? extends Serializable> task : driver.getPlan().getRootTasks()) {
      // Exact class match (==) deliberately excludes any ExplainTask subclasses.
      if (task.getClass() == ExplainTask.class) {
        resultSchema = new TableSchema(mResultSchema);
        setHasResultSet(true);
        break;
      }
    }
  } catch (HiveSQLException e) {
    // Already the right exception type; just record the failed state and rethrow.
    setState(OperationState.ERROR);
    throw e;
  } catch (Throwable e) {
    // Wrap anything unexpected so callers always see a HiveSQLException,
    // preserving the original cause.
    setState(OperationState.ERROR);
    throw new HiveSQLException("Error running query: " + e.toString(), e);
  }
}
/**
 * Returns the FetchWork of the current query plan, or {@code null} when the plan
 * is absent or has no fetch task (only SELECT-style queries carry one). The fetch
 * task is (re)initialized against the plan before its work is handed back.
 */
private synchronized FetchWork getFetchWork() {
  QueryPlan queryPlan = driver.getPlan();
  if (queryPlan == null) {
    return null;
  }
  FetchTask task = queryPlan.getFetchTask();
  if (task == null) {
    return null;
  }
  task.initialize(hiveConf, queryPlan, null);
  return task.getWork();
}
/** Returns the SQL text of the current plan, or {@code "Unknown"} when no driver or plan exists. */
public String getQueryStr() {
  if (driver == null) {
    return "Unknown";
  }
  QueryPlan plan = driver.getPlan();
  if (plan == null) {
    return "Unknown";
  }
  return plan.getQueryStr();
}