Example #1
 private void runQuery(HiveConf sqlOperationConf) throws HiveSQLException {
   try {
     // In Hive server mode, we are not able to retry in the FetchTask
     // case, when calling fetch queries since execute() has returned.
     // For now, we disable the retry attempts.
     driver.setTryCount(Integer.MAX_VALUE);
     response = driver.run();
     if (0 != response.getResponseCode()) {
       throw toSQLException("Error while processing statement", response);
     }
   } catch (HiveSQLException e) {
     // If the operation was cancelled by another thread,
     // Driver#run will return a non-zero response code.
     // We will simply return if the operation state is CANCELED,
     // otherwise throw an exception
     if (getStatus().getState() == OperationState.CANCELED) {
       return;
     } else {
       setState(OperationState.ERROR);
       throw e;
     }
   } catch (Throwable e) {
     setState(OperationState.ERROR);
     throw new HiveSQLException("Error running query: " + e.toString(), e);
   }
   setState(OperationState.FINISHED);
 }
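The toSQLException(...) helper called above is not part of this snippet. A minimal sketch of what it could look like, built only from the accessors used inline in Example #3 (getErrorMessage(), getSQLState(), and getResponseCode() on the CommandProcessorResponse), might be:

 // Hypothetical sketch of toSQLException; it mirrors the inline construction in
 // Example #3 and is not the verbatim HiveServer2 implementation.
 private HiveSQLException toSQLException(String prefix, CommandProcessorResponse response) {
   return new HiveSQLException(
       prefix + ": " + response.getErrorMessage(),
       response.getSQLState(),
       response.getResponseCode());
 }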
Example #2
  /**
   * Compiles the query and extracts the result metadata.
   *
   * @param sqlOperationConf the HiveConf used to configure the Driver for this operation
   * @throws HiveSQLException if compilation fails
   */
  public void prepare(HiveConf sqlOperationConf) throws HiveSQLException {
    setState(OperationState.RUNNING);

    try {
      driver = new Driver(sqlOperationConf, getParentSession().getUserName());

      // set the operation handle information in Driver, so that thrift API users
      // can use the operation handle they receive, to lookup query information in
      // Yarn ATS
      String guid64 =
          Base64.encodeBase64URLSafeString(
                  getHandle().getHandleIdentifier().toTHandleIdentifier().getGuid())
              .trim();
      driver.setOperationId(guid64);

      // In Hive server mode, we are not able to retry in the FetchTask
      // case, when calling fetch queries since execute() has returned.
      // For now, we disable the retry attempts.
      driver.setTryCount(Integer.MAX_VALUE);

      response = driver.compileAndRespond(statement);
      if (0 != response.getResponseCode()) {
        throw toSQLException("Error while compiling statement", response);
      }

      mResultSchema = driver.getSchema();

      // hasResultSet should be true only if the query has a FetchTask
      // "explain" is an exception for now
      if (driver.getPlan().getFetchTask() != null) {
        // Schema has to be set
        if (mResultSchema == null || !mResultSchema.isSetFieldSchemas()) {
          throw new HiveSQLException(
              "Error compiling query: Schema and FieldSchema "
                  + "should be set when query plan has a FetchTask");
        }
        resultSchema = new TableSchema(mResultSchema);
        setHasResultSet(true);
      } else {
        setHasResultSet(false);
      }
      // Set hasResultSet true if the plan has ExplainTask
      // TODO explain should use a FetchTask for reading
      for (Task<? extends Serializable> task : driver.getPlan().getRootTasks()) {
        if (task.getClass() == ExplainTask.class) {
          resultSchema = new TableSchema(mResultSchema);
          setHasResultSet(true);
          break;
        }
      }
    } catch (HiveSQLException e) {
      setState(OperationState.ERROR);
      throw e;
    } catch (Throwable e) {
      setState(OperationState.ERROR);
      throw new HiveSQLException("Error running query: " + e.toString(), e);
    }
  }
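Examples #1 and #2 are two halves of the same flow: prepare(...) compiles the statement and derives the result schema, while runQuery(...) from Example #1 executes the already-compiled plan. A minimal caller sketch is below; the method name runInternal and the shouldRunAsync() check are assumptions made for illustration, not part of the examples.

  // Hypothetical caller sketch chaining the two methods shown in Examples #1 and #2.
  public void runInternal() throws HiveSQLException {
    HiveConf opConfig = getParentSession().getHiveConf();
    prepare(opConfig);        // compile the statement and set the result schema
    if (!shouldRunAsync()) {  // assumed helper: true when the query should run in the background
      runQuery(opConfig);     // execute the compiled plan; ends in the FINISHED state
    }
    // in the asynchronous case the execution would be submitted to a background pool (omitted)
  }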
Example #3
  @Override
  public void run() throws HiveSQLException {
    setState(OperationState.RUNNING);
    String statement_trimmed = statement.trim();
    String[] tokens = statement_trimmed.split("\\s");
    String cmd_1 = statement_trimmed.substring(tokens[0].length()).trim();

    int ret = 0;
    String errorMessage = "";
    String SQLState = null;

    try {
      driver = new Driver(getParentSession().getHiveConf());
      // In Hive server mode, we are not able to retry in the FetchTask
      // case, when calling fetch queries since execute() has returned.
      // For now, we disable the retry attempts.
      driver.setTryCount(Integer.MAX_VALUE);

      String subStatement =
          new VariableSubstitution().substitute(getParentSession().getHiveConf(), statement);

      response = driver.run(subStatement);
      if (0 != response.getResponseCode()) {
        throw new HiveSQLException(
            "Error while processing statement: " + response.getErrorMessage(),
            response.getSQLState(),
            response.getResponseCode());
      }

      mResultSchema = driver.getSchema();
      if (mResultSchema != null && mResultSchema.isSetFieldSchemas()) {
        resultSchema = new TableSchema(mResultSchema);
        setHasResultSet(true);
      } else {
        setHasResultSet(false);
      }
    } catch (HiveSQLException e) {
      setState(OperationState.ERROR);
      throw e;
    } catch (Exception e) {
      setState(OperationState.ERROR);
      throw new HiveSQLException("Error running query: " + e.toString());
    }
    setState(OperationState.FINISHED);
  }
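Unlike the first two examples, Example #3 performs variable substitution on the statement and hands the result to driver.run(...) in a single call instead of splitting compilation and execution. As a rough illustration of the substitution step, assuming Hive's ${hiveconf:...} syntax, the default hive.variable.substitute=true setting, and the same no-argument VariableSubstitution constructor used above (the property name and query text are made up for this sketch):

    // Hypothetical illustration of the substitution step used in Example #3.
    HiveConf conf = getParentSession().getHiveConf();
    conf.set("report.year", "2015");  // made-up property for this sketch
    String resolved = new VariableSubstitution()
        .substitute(conf, "SELECT * FROM sales WHERE year = ${hiveconf:report.year}");
    // resolved is now "SELECT * FROM sales WHERE year = 2015"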