Example #1
    public synchronized QueryExplanation explain() throws BeeswaxException {
      assertState(QueryState.INITIALIZED);
      // Mutating the query in place like this makes errors harder to find.
      query.query = "EXPLAIN " + query.query;
      checkedCompile();

      int ret;
      if (0 != (ret = driver.execute())) {
        throwException(new RuntimeException("Failed to execute EXPLAIN, error code: " + ret));
      }
      StringBuilder sb = new StringBuilder();
      ArrayList<String> v = new ArrayList<String>();
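      // Drain the driver batch by batch; each getResults call refills 'v' with the next chunk of plan text.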
      try {
        while (driver.getResults(v)) {
          for (String s : v) {
            sb.append(s);
            sb.append("\n");
          }
          v.clear();
        }
      } catch (IOException e) {
        throwException(new RuntimeException(e));
      } finally {
        // Don't let folks re-use the state object.
        state = QueryState.FINISHED;
      }
      return new QueryExplanation(sb.toString());
    }
Example #2
  @Override
  public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
    validateDefaultFetchOrientation(orientation);
    assertState(OperationState.FINISHED);
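    // Rows can only be fetched once the operation has finished executing.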

    RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion());

    try {
      /* If the client requests fetch-from-start and this is not the first read
       * from this operation, reset the fetch position to the beginning.
       */
      if (orientation.equals(FetchOrientation.FETCH_FIRST) && fetchStarted) {
        driver.resetFetch();
      }
      fetchStarted = true;
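      // Cap this fetch at maxRows; the driver fills 'convey' with up to that many rows.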
      driver.setMaxRows((int) maxRows);
      if (driver.getResults(convey)) {
        return decode(convey, rowSet);
      }
      return rowSet;
    } catch (Exception e) {
      // IOException, CommandNeedRetryException, and any other failure are wrapped the same way.
      throw new HiveSQLException(e);
    } finally {
      convey.clear();
    }
  }
Example #3
    private void materializeResults(Results r, boolean startOver) throws IOException {
      if (driver.getPlan().getFetchTask() == null) {
        // This query is never going to return anything.
        r.has_more = false;
        r.setData(Collections.<String>emptyList());
        r.setColumns(Collections.<String>emptyList());
        return;
      }

      if (startOver) {
        // This is totally inappropriately reaching into internals.
        driver.getPlan().getFetchTask().initialize(hiveConf, driver.getPlan(), null);
        startRow = 0;
      }

      ArrayList<String> v = new ArrayList<String>();
      r.setData(v);
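      // getResults fills 'v' with the next batch; has_more reports whether more rows remain.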
      r.has_more = driver.getResults(v);
      r.start_row = startRow;
      startRow += v.size();

      r.setColumns(new ArrayList<String>());
      try {
        for (FieldSchema f : driver.getSchema().getFieldSchemas()) {
          r.addToColumns(f.getName());
        }
      } catch (Exception e) {
        // An empty partitioned table may not have a table description.
        LOG.error("Error getting column names of results.", e);
      }
    }
    /**
     * Fetches all the rows in a result set.
     *
     * @return All the rows in a result set of a query executed using execute method.
     *     <p>TODO: Currently the server buffers all the rows before returning them to the client.
     *     Decide whether the buffering should be done in the client.
     */
    public List<String> fetchAll() throws HiveServerException, TException {
      if (!isHiveQuery) {
        // Return no results if the last command was not a Hive query.
        return new Vector<String>();
      }

      Vector<String> rows = new Vector<String>();
      Vector<String> result = new Vector<String>();
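      // Each getResults call refills 'result' with the next batch until the query is drained.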
      try {
        while (driver.getResults(result)) {
          rows.addAll(result);
          result.clear();
        }
      } catch (IOException e) {
        throw new HiveServerException(e.getMessage());
      }
      return rows;
    }
    /**
     * Fetches numRows rows.
     *
     * @param numRows Number of rows to fetch.
     * @return A list of rows. The size of the list is numRows if there are at least numRows rows
     *     available to return. The size is smaller than numRows if there aren't enough rows. The
     *     list will be empty if there are no more rows to fetch or numRows == 0.
     * @throws HiveServerException Invalid value for numRows (numRows < 0)
     */
    public List<String> fetchN(int numRows) throws HiveServerException, TException {
      if (numRows < 0) {
        throw new HiveServerException("Invalid argument for number of rows: " + numRows);
      }
      if (!isHiveQuery) {
        // Return no results if the last command was not a Hive query.
        return new Vector<String>();
      }

      Vector<String> result = new Vector<String>();
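      // With the cap set below, a single getResults call returns at most numRows rows.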
      driver.setMaxRows(numRows);
      try {
        driver.getResults(result);
      } catch (IOException e) {
        throw new HiveServerException(e.getMessage());
      }
      return result;
    }
    /**
     * Fetches the next row in a query result set.
     *
     * @return the next row in a query result set, or an empty string if there are no more rows to
     *     fetch (Thrift cannot transport null).
     */
    public String fetchOne() throws HiveServerException, TException {
      if (!isHiveQuery) {
        // Return an empty string if the last command was not a Hive query.
        return "";
      }

      Vector<String> result = new Vector<String>();
      driver.setMaxRows(1);
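      // The cap of 1 guarantees at most one row in 'result' after getResults.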
      try {
        if (driver.getResults(result)) {
          return result.get(0);
        }
        // TODO: Cannot return null here because thrift cannot handle nulls
        // TODO: Returning empty string for now. Need to figure out how to
        // TODO: return null in some other way
        return "";
      } catch (IOException e) {
        throw new HiveServerException(e.getMessage());
      }
    }
Example #7
  @Override
  public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
    assertState(OperationState.FINISHED);
    ArrayList<String> rows = new ArrayList<String>();
    driver.setMaxRows((int) maxRows);

    try {
      driver.getResults(rows);
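      // 'rows' now holds the fetched rows as serialized strings; deserialize them via the SerDe below.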

      getSerDe();
      StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector();
      List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
      RowSet rowSet = new RowSet();

      Object[] deserializedFields = new Object[fieldRefs.size()];
      Object rowObj;
      ObjectInspector fieldOI;

      for (String rowString : rows) {
        rowObj = serde.deserialize(new BytesWritable(rowString.getBytes()));
        for (int i = 0; i < fieldRefs.size(); i++) {
          StructField fieldRef = fieldRefs.get(i);
          fieldOI = fieldRef.getFieldObjectInspector();
          deserializedFields[i] =
              convertLazyToJava(soi.getStructFieldData(rowObj, fieldRef), fieldOI);
        }
        rowSet.addRow(resultSchema, deserializedFields);
      }
      return rowSet;
    } catch (Exception e) {
      // IOException, CommandNeedRetryException, and SerDe errors are all wrapped the same way.
      throw new HiveSQLException(e);
    }
  }
Example #8
  public int processCmd(String cmd) throws TException, HiveException {
    SessionState ss = SessionState.get();
    ss.setiscli(true);

    String cmd_trimmed = cmd.trim();
    String[] tokens = cmd_trimmed.split("\\s+");
    String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
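    // cmd_1 is the command with its leading keyword stripped, used by non-Driver processors below.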
    int ret = 0;

    if (tokens[0].equalsIgnoreCase("delete")) {
      Vector<String> nexttoken = new Vector<String>();
      nexttoken.add("jar");
      nexttoken.add("file");
      nexttoken.add("from");
      if (tokens.length < 2 || !nexttoken.contains(tokens[1].toLowerCase())) {
        String errorMessage =
            "\nif delete resource:\n"
                + "Usage: delete [FILE|JAR] <value> [<value>]*\n"
                + "if delete table rows:\n"
                + "Usage: delete from tableName [where searchCondition]";
        console.printError(errorMessage);
        ret = 1;

        return ret;
      }
    }

    if (tokens[0].equalsIgnoreCase("dfs") || tokens[0].equalsIgnoreCase("zktest")) {
      String errorMessage = "\ntdw hive does not support the " + tokens[0].toLowerCase() + " operation\n";
      throw new HiveException(errorMessage);
    }

    if (cmd_trimmed.toLowerCase().equals("quit") || cmd_trimmed.toLowerCase().equals("exit")) {
      System.exit(0);
    } else if (cmd_trimmed.startsWith("!")) {
      String shell_cmd = cmd_trimmed.substring(1);
      try {
        Process executor = Runtime.getRuntime().exec(shell_cmd);
        StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, ss.out);
        StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, ss.err);

        outPrinter.start();
        errPrinter.start();

        ret = executor.waitFor();
        if (ret != 0) {
          console.printError("Command failed with exit code = " + ret);
        }
      } catch (Exception e) {
        console.printError(
            "Exception raised from Shell command " + e.getLocalizedMessage(),
            org.apache.hadoop.util.StringUtils.stringifyException(e));
        ret = 1;
      }

    } else if (tokens[0].toLowerCase().equals("list")) {

      SessionState.ResourceType t;
      if (tokens.length < 2 || (t = SessionState.find_resource_type(tokens[1])) == null) {
        console.printError(
            "Usage: list ["
                + StringUtils.join(SessionState.ResourceType.values(), "|")
                + "] [<value> [<value>]*]");
        ret = 1;
      } else {
        List<String> filter = null;
        if (tokens.length >= 3) {
          // Use only the arguments after "list <type>" as the filter; an in-place arraycopy
          // over 'tokens' would leave stale trailing entries in the list.
          filter = Arrays.asList(Arrays.copyOfRange(tokens, 2, tokens.length));
        }
        Set<String> s = ss.list_resource(t, filter);
        if (s != null && !s.isEmpty()) ss.out.println(StringUtils.join(s, "\n"));
      }

    } else {
      CommandProcessor proc = CommandProcessorFactory.get(tokens);
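      // Hive queries go through Driver; other commands (set, add, etc.) use their own processors.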
      if (proc != null) {
        if (proc instanceof Driver) {
          Driver qp = (Driver) proc;
          PrintStream out = ss.out;
          long start = System.currentTimeMillis();

          try {
            ret = qp.run(cmd);
          } catch (Exception e1) {
            e1.printStackTrace();
          }
          if (ret != 0) {
            qp.close();
            return ret;
          }

          Vector<String> res = new Vector<String>();
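          // Print results in batches; bail out early if the output stream reports an error.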
          try {
            while (qp.getResults(res)) {
              for (String r : res) {
                out.println(r);
              }
              res.clear();
              if (out.checkError()) {
                break;
              }
            }
          } catch (IOException e) {
            console.printError(
                "Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
                "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
            ret = 1;
          }

          int cret = qp.close();
          if (ret == 0) {
            ret = cret;
          }

          long end = System.currentTimeMillis();
          if (end > start) {
            double timeTaken = (double) (end - start) / 1000.0;
            console.printInfo("Time taken: " + timeTaken + " seconds", null);
          }

        } else {
          try {
            ret = proc.run(cmd_1);
          } catch (Exception e) {
            e.printStackTrace();
          }
        }
      }
    }

    return ret;
  }