Example #1
  public RowSet getOperationLogRowSet(
      OperationHandle opHandle, FetchOrientation orientation, long maxRows, HiveConf hConf)
      throws HiveSQLException {
    TableSchema tableSchema = new TableSchema(getLogSchema());
    RowSet rowSet = RowSetFactory.create(tableSchema, getOperation(opHandle).getProtocolVersion());

    if (!hConf.getBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
      LOG.warn(
          "Tried to get operation log while hive.server2.logging.operation.enabled is false; no log will be returned.");
      return rowSet;
    }
    // get the OperationLog object from the operation
    OperationLog operationLog = getOperation(opHandle).getOperationLog();
    if (operationLog == null) {
      throw new HiveSQLException("Couldn't find log associated with operation handle: " + opHandle);
    }

    // read logs
    List<String> logs;
    try {
      logs = operationLog.readOperationLog(isFetchFirst(orientation), maxRows);
    } catch (SQLException e) {
      throw new HiveSQLException(e.getMessage(), e.getCause());
    }

    // convert logs to RowSet
    for (String log : logs) {
      rowSet.addRow(new String[] {log});
    }

    return rowSet;
  }
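The method above packs each log line into a one-column row. Because RowSet is iterable over Object[] rows (the getQueryLog() client code in Example #4 relies on the same behavior), a hypothetical caller could print the fetched log as in the sketch below; the OperationManager owner and the opHandle/conf variables are assumptions for illustration, not part of the example above.

  // A minimal sketch, not Hive source: print the log lines fetched by the method above.
  void printOperationLog(OperationManager opManager, OperationHandle opHandle, HiveConf conf)
      throws HiveSQLException {
    RowSet logRows =
        opManager.getOperationLogRowSet(opHandle, FetchOrientation.FETCH_NEXT, 1000, conf);
    for (Object[] row : logRows) {   // RowSet iterates over Object[] rows
      System.out.println(row[0]);    // each row carries a single log line
    }
  }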
Example #2
  @Override
  public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
    validateDefaultFetchOrientation(orientation);
    assertState(OperationState.FINISHED);

    RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion());

    try {
      /* If the client requests fetch-from-start and it's not the first read from
       * this operation, reset the fetch position to the beginning.
       */
      if (orientation.equals(FetchOrientation.FETCH_FIRST) && fetchStarted) {
        driver.resetFetch();
      }
      fetchStarted = true;
      driver.setMaxRows((int) maxRows);
      if (driver.getResults(convey)) {
        return decode(convey, rowSet);
      }
      return rowSet;
    } catch (Exception e) {
      // IOException, CommandNeedRetryException and anything else get wrapped the same way
      throw new HiveSQLException(e);
    } finally {
      convey.clear();
    }
  }
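Example #3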
  /* (non-Javadoc)
   * @see org.apache.hive.service.cli.operation.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
   */
  @Override
  public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
    validateDefaultFetchOrientation(orientation);
    if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
      resetResultReader();
    }
    List<String> rows = readResults((int) maxRows);
    RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion());

    for (String row : rows) {
      rowSet.addRow(new String[] {row});
    }
    return rowSet;
  }
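Both getNextRowSet() implementations above follow the same paging contract: FETCH_FIRST rewinds to the start of the results, FETCH_NEXT continues from the current position, and an empty RowSet signals that the results are exhausted. A hypothetical consumer loop built on that contract is sketched below; the operation variable, the batch size and the handle(...) callback are assumptions, not part of the examples above.

  // Hypothetical consumer sketch, not Hive source: drain an operation's results in pages.
  RowSet batch = operation.getNextRowSet(FetchOrientation.FETCH_FIRST, 1000);
  while (batch.numRows() > 0) {
    for (Object[] row : batch) {
      handle(row);                    // assumed per-row callback
    }
    batch = operation.getNextRowSet(FetchOrientation.FETCH_NEXT, 1000);
  }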
Example #4
  /**
   * Get the execution logs of the given SQL statement. This method is a public API for usage
   * outside of Hive, although it is not part of the interface java.sql.Statement.
   *
   * @param incremental if true, fetch the logs incrementally from the last fetch position;
   *     if false, fetch them from the beginning
   * @param fetchSize the number of lines to fetch
   * @return a list of logs; it can be empty if there are no new logs to retrieve at that time
   * @throws SQLException if an error occurs while fetching the query log
   * @throws ClosedOrCancelledStatementException if the statement has been closed or cancelled
   */
  public List<String> getQueryLog(boolean incremental, int fetchSize)
      throws SQLException, ClosedOrCancelledStatementException {
    checkConnection("getQueryLog");
    if (isCancelled) {
      throw new ClosedOrCancelledStatementException(
          "Method getQueryLog() failed. The statement has been closed or cancelled.");
    }

    List<String> logs = new ArrayList<String>();
    TFetchResultsResp tFetchResultsResp = null;
    try {
      if (stmtHandle != null) {
        TFetchResultsReq tFetchResultsReq =
            new TFetchResultsReq(stmtHandle, getFetchOrientation(incremental), fetchSize);
        // fetch type 1 requests the operation log rather than the query results
        tFetchResultsReq.setFetchType((short) 1);
        tFetchResultsResp = client.FetchResults(tFetchResultsReq);
        Utils.verifySuccessWithInfo(tFetchResultsResp.getStatus());
      } else {
        if (isQueryClosed) {
          throw new ClosedOrCancelledStatementException(
              "Method getQueryLog() failed. The statement has been closed or cancelled.");
        }
        if (isExecuteStatementFailed) {
          throw new SQLException(
              "Method getQueryLog() failed. The stmtHandle in HiveStatement is null and "
                  + "the statement execution may have failed.");
        } else {
          return logs;
        }
      }
    } catch (SQLException e) {
      throw e;
    } catch (Exception e) {
      throw new SQLException("Error when getting query log: " + e, e);
    }

    RowSet rowSet = RowSetFactory.create(tFetchResultsResp.getResults(), connection.getProtocol());
    for (Object[] row : rowSet) {
      logs.add(String.valueOf(row[0]));
    }
    return logs;
  }
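Since getQueryLog() only returns what has been logged so far, it is typically polled from a separate thread while the statement is still executing. A minimal sketch of that pattern follows; the connection parameter, query text and polling interval are assumptions, not part of the method shown above.

  // A hedged sketch, not Hive source: tail the operation log produced by a query.
  void runAndTailLog(Connection connection) throws Exception {
    HiveStatement stmt = (HiveStatement) connection.createStatement();
    Thread logTailer = new Thread(() -> {
      try {
        while (stmt.hasMoreLogs()) {
          for (String line : stmt.getQueryLog(true, 500)) {   // incremental fetch, 500 lines per call
            System.out.println(line);
          }
          Thread.sleep(500L);                                 // assumed polling interval
        }
      } catch (Exception e) {
        // a closed or cancelled statement ends the polling loop
      }
    });
    logTailer.start();
    stmt.execute("SELECT COUNT(*) FROM src");                 // assumed query
    logTailer.join();
    stmt.close();
  }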
  protected GetSchemasOperation(HiveSession parentSession, String catalogName, String schemaName) {
    super(parentSession, OperationType.GET_SCHEMAS);
    this.catalogName = catalogName;
    this.schemaName = schemaName;
    this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
  }
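The constructor above only prepares an empty RowSet against the fixed RESULT_SET_SCHEMA; rows are added later when the operation actually runs. A rough sketch of that later step follows; the helper name, the source of the schema names and the two-column layout are assumptions, not the exact Hive implementation.

  // Hedged sketch: filling the pre-created rowSet once the schema names are known.
  // The TABLE_SCHEM / TABLE_CATALOG column order is an assumption here.
  private void addSchemaRows(List<String> schemaNames) {
    for (String dbName : schemaNames) {
      rowSet.addRow(new Object[] {dbName, catalogName});
    }
  }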