Example #1
  @Override
  public int execute(DriverContext driverContext) {

    PrintStream out = null;
    try {
      Path resFile = new Path(work.getResFile());
      OutputStream outS = resFile.getFileSystem(conf).create(resFile);
      out = new PrintStream(outS);

      QB qb = work.getQb();
      TokenRewriteStream stream = work.getCtx().getTokenRewriteStream();
      String program = "sq rewrite";
      ASTNode ast = work.getAst();

      try {
        addRewrites(stream, qb, program, out);
        out.println(
            "\nRewritten Query:\n"
                + stream.toString(program, ast.getTokenStartIndex(), ast.getTokenStopIndex()));
      } finally {
        stream.deleteProgram(program);
      }

      out.close();
      out = null;
      return 0;
    } catch (Exception e) {
      console.printError(
          "Failed with exception " + e.getMessage(), "\n" + StringUtils.stringifyException(e));
      return 1;
    } finally {
      IOUtils.closeStream(out);
    }
  }
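
A minimal sketch (not from the Hive source; arbitrary token types, ANTLR 3.3+ assumed for fill()) of the ANTLR TokenRewriteStream "program" API that execute() relies on: edits are recorded under a named program, materialized only by toString(program, start, stop), and discarded by deleteProgram, which is why the finally block above can cleanly undo the rewrite bookkeeping.

import java.util.Arrays;
import java.util.Iterator;
import org.antlr.runtime.CommonToken;
import org.antlr.runtime.Token;
import org.antlr.runtime.TokenRewriteStream;
import org.antlr.runtime.TokenSource;

public class RewriteProgramDemo {
  // Tiny TokenSource over a fixed token list; stands in for a real lexer.
  static TokenSource source(final Token... tokens) {
    final Iterator<Token> it = Arrays.asList(tokens).iterator();
    return new TokenSource() {
      public Token nextToken() {
        return it.hasNext() ? it.next() : Token.EOF_TOKEN;
      }

      public String getSourceName() {
        return "demo";
      }
    };
  }

  public static void main(String[] args) {
    // Three fake tokens standing in for a lexed query: "SELECT ", "x", " FROM t".
    TokenRewriteStream stream =
        new TokenRewriteStream(
            source(
                new CommonToken(1, "SELECT "),
                new CommonToken(1, "x"),
                new CommonToken(1, " FROM t")));
    stream.fill(); // buffer all tokens so they are assigned indexes

    String program = "demo rewrite"; // edits are grouped under a named program
    stream.replace(program, 1, 1, "y"); // replace token #1 ("x")
    stream.insertAfter(program, 1, ", z"); // splice text in after token #1

    // The buffered tokens stay untouched; the program is applied at render time.
    System.out.println(stream.toString(program, 0, 2)); // SELECT y, z FROM t
    stream.deleteProgram(program); // drop the recorded edits, as the finally block does
  }
}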
Example #2
  void addRewrites(TokenRewriteStream stream, QB qb, String program, PrintStream out) {
    QBSubQuery sqW = qb.getWhereClauseSubQueryPredicate();
    QBSubQuery sqH = qb.getHavingClauseSubQueryPredicate();

    if (sqW != null || sqH != null) {

      ASTNode sqNode =
          sqW != null
              ? sqW.getOriginalSubQueryASTForRewrite()
              : sqH.getOriginalSubQueryASTForRewrite();
      ASTNode tokQry = getQueryASTNode(sqNode);
      ASTNode tokFrom = (ASTNode) tokQry.getChild(0);

      StringBuilder addedJoins = new StringBuilder();

      if (sqW != null) {
        addRewrites(stream, sqW, program, out, qb.getId(), true, addedJoins);
      }

      if (sqH != null) {
        addRewrites(stream, sqH, program, out, qb.getId(), false, addedJoins);
      }
      stream.insertAfter(program, tokFrom.getTokenStopIndex(), addedJoins);
    }

    Set<String> sqAliases = qb.getSubqAliases();
    for (String sqAlias : sqAliases) {
      addRewrites(stream, qb.getSubqForAlias(sqAlias).getQB(), program, out);
    }
  }
Example #3
 public String getInsertClause() {
   String insertString = "";
   ASTNode destTree = qb.getParseInfo().getDestForClause(clauseName);
   if (destTree != null
       && ((ASTNode) (destTree.getChild(0))).getToken().getType() != TOK_TMP_FILE) {
     insertString =
         "INSERT OVERWRITE" + HQLParser.getString(qb.getParseInfo().getDestForClause(clauseName));
   }
   return insertString;
 }
Example #4
  /*
   * add array<struct> to the list of columns
   */
  protected static RowResolver createSelectListRR(MatchPath evaluator, PTFInputDef inpDef)
      throws SemanticException {
    RowResolver rr = new RowResolver();
    RowResolver inputRR = inpDef.getOutputShape().getRr();

    evaluator.inputColumnNamesMap = new HashMap<String, String>();
    ArrayList<String> inputColumnNames = new ArrayList<String>();

    ArrayList<ObjectInspector> inpColOIs = new ArrayList<ObjectInspector>();

    for (ColumnInfo inpCInfo : inputRR.getColumnInfos()) {
      ColumnInfo cInfo = new ColumnInfo(inpCInfo);
      String colAlias = cInfo.getAlias();

      String[] tabColAlias = inputRR.reverseLookup(inpCInfo.getInternalName());
      if (tabColAlias != null) {
        colAlias = tabColAlias[1];
      }
      ASTNode inExpr = PTFTranslator.getASTNode(inpCInfo, inputRR);
      if (inExpr != null) {
        rr.putExpression(inExpr, cInfo);
        colAlias = inExpr.toStringTree().toLowerCase();
      } else {
        colAlias = colAlias == null ? cInfo.getInternalName() : colAlias;
        rr.put(cInfo.getTabAlias(), colAlias, cInfo);
      }

      evaluator.inputColumnNamesMap.put(cInfo.getInternalName(), colAlias);
      inputColumnNames.add(colAlias);
      inpColOIs.add(cInfo.getObjectInspector());
    }

    StandardListObjectInspector pathAttrOI =
        ObjectInspectorFactory.getStandardListObjectInspector(
            ObjectInspectorFactory.getStandardStructObjectInspector(inputColumnNames, inpColOIs));

    ColumnInfo pathColumn =
        new ColumnInfo(
            PATHATTR_NAME,
            TypeInfoUtils.getTypeInfoFromObjectInspector(pathAttrOI),
            null,
            false,
            false);
    rr.put(null, PATHATTR_NAME, pathColumn);

    return rr;
  }
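
A standalone sketch of the ObjectInspector plumbing used above, with illustrative column names (symbol, price) in place of MatchPath's input columns: a struct inspector is composed from parallel name/inspector lists, and wrapping it in a list inspector yields the array<struct<...>> column the comment promises.

import java.util.Arrays;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class PathAttrOIDemo {
  public static void main(String[] args) {
    // struct<symbol:string,price:double> built from parallel name/OI lists,
    // then wrapped in a list inspector to get array<struct<...>>.
    StandardListObjectInspector pathAttrOI =
        ObjectInspectorFactory.getStandardListObjectInspector(
            ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList("symbol", "price"),
                Arrays.<ObjectInspector>asList(
                    PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                    PrimitiveObjectInspectorFactory.javaDoubleObjectInspector)));

    // Prints: array<struct<symbol:string,price:double>>
    System.out.println(pathAttrOI.getTypeName());
  }
}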
Example #5
  static void updateAliasMap(ASTNode root, CubeQueryContext cubeql) {
    if (root == null) {
      return;
    }

    if (root.getToken().getType() == TOK_SELEXPR) {
      ASTNode alias = HQLParser.findNodeByPath(root, Identifier);
      if (alias != null) {
        cubeql.addExprToAlias(root, alias);
      }
    } else {
      for (int i = 0; i < root.getChildCount(); i++) {
        updateAliasMap((ASTNode) root.getChild(i), cubeql);
      }
    }
  }
Example #6
    @Override
    protected ExprNodeColumnDesc processQualifiedColRef(
        TypeCheckCtx ctx, ASTNode expr, Object... nodeOutputs) throws SemanticException {
      String tableAlias =
          BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getChild(0).getText());
      // NOTE: tableAlias must be a valid non-ambiguous table alias,
      // because we've checked that in TOK_TABLE_OR_COL's process method.
      ColumnInfo colInfo =
          getColInfo(
              (JoinTypeCheckCtx) ctx,
              tableAlias,
              ((ExprNodeConstantDesc) nodeOutputs[1]).getValue().toString(),
              expr);

      if (colInfo == null) {
        ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1)), expr);
        return null;
      }
      return new ExprNodeColumnDesc(
          colInfo.getType(), colInfo.getInternalName(), tableAlias, colInfo.getIsVirtualCol());
    }
Example #7
    @Override
    public Object process(
        Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
        throws SemanticException {

      JoinTypeCheckCtx ctx = (JoinTypeCheckCtx) procCtx;
      if (ctx.getError() != null) {
        return null;
      }

      ASTNode expr = (ASTNode) nd;
      ASTNode parent = stack.size() > 1 ? (ASTNode) stack.get(stack.size() - 2) : null;

      if (expr.getType() != HiveParser.TOK_TABLE_OR_COL) {
        ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr), expr);
        return null;
      }

      assert (expr.getChildCount() == 1);
      String tableOrCol = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getText());

      boolean qualifiedAccess = (parent != null && parent.getType() == HiveParser.DOT);

      if (!qualifiedAccess) {
        // It's an unqualified column reference.
        ColumnInfo colInfo = getColInfo(ctx, null, tableOrCol, expr);
        return new ExprNodeColumnDesc(colInfo);
      } else if (hasTableAlias(ctx, tableOrCol, expr)) {
        return null;
      } else {
        // Qualified column access for which table was not found
        throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(expr));
      }
    }
Example #8
 @VisibleForTesting
 protected static AccessURI extractPartition(ASTNode ast) throws SemanticException {
   for (int i = 0; i < ast.getChildCount(); i++) {
     ASTNode child = (ASTNode) ast.getChild(i);
     if (child.getToken().getType() == HiveParser.TOK_PARTITIONLOCATION
         && child.getChildCount() == 1) {
       return parseURI(BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText()));
     }
   }
   return null;
 }
Example #9
    private void resolveClause(CubeQueryContext query, ASTNode node) throws LensException {
      if (node == null) {
        return;
      }

      int nodeType = node.getToken().getType();
      if (nodeType == HiveParser.DOT) {
        String colName = HQLParser.getColName(node).toLowerCase();
        if (!pickedReferences.containsKey(colName)) {
          return;
        }
        // No need to create a new node; just replace the table name
        // and column name identifiers.
        ASTNode tableNode = (ASTNode) node.getChild(0);
        ASTNode tabident = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, Identifier);

        PickedReference referred = getPickedReference(colName, tabident.getText().toLowerCase());
        if (referred == null) {
          return;
        }
        ASTNode newTableNode =
            new ASTNode(
                new CommonToken(HiveParser.Identifier, referred.getChainRef().getChainName()));
        tableNode.setChild(0, newTableNode);

        ASTNode newColumnNode =
            new ASTNode(
                new CommonToken(HiveParser.Identifier, referred.getChainRef().getRefColumn()));
        node.setChild(1, newColumnNode);
      } else {
        // recurse down
        for (int i = 0; i < node.getChildCount(); i++) {
          ASTNode child = (ASTNode) node.getChild(i);
          resolveClause(query, child);
        }
      }
    }
Example #10
  boolean isCubeMeasure(ASTNode node) {
    String tabname = null;
    String colname;
    int nodeType = node.getToken().getType();
    if (!(nodeType == HiveParser.TOK_TABLE_OR_COL || nodeType == HiveParser.DOT)) {
      return false;
    }

    if (nodeType == HiveParser.TOK_TABLE_OR_COL) {
      colname = ((ASTNode) node.getChild(0)).getText();
    } else {
      // node in 'alias.column' format
      ASTNode tabident = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, Identifier);
      ASTNode colIdent = (ASTNode) node.getChild(1);

      colname = colIdent.getText();
      tabname = tabident.getText();
    }

    String msrname = StringUtils.isBlank(tabname) ? colname : tabname + "." + colname;

    return isCubeMeasure(msrname);
  }
Example #11
  /**
   * Pre-analyze hook called after compilation and before semantic analysis. We extract information
   * for database- and metadata-level operations that is not captured in the input/output entities
   * during semantic analysis. Ideally this should be handled in Hive; we need to move most of this
   * into the Hive semantic analyzer and then remove it from the access hook.
   */
  @Override
  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
      throws SemanticException {

    switch (ast.getToken().getType()) {
        // Hive parser doesn't capture the database name in output entity, so we store it here for
        // now
      case HiveParser.TOK_CREATEDATABASE:
      case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
      case HiveParser.TOK_DROPDATABASE:
      case HiveParser.TOK_SWITCHDATABASE:
      case HiveParser.TOK_DESCDATABASE:
        currDB = new Database(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()));
        break;
      case HiveParser.TOK_CREATETABLE:
      case HiveParser.TOK_CREATEVIEW:
        /*
         * Compiler doesn't create read/write entities for create table.
         * Hence we need to extract the db name from the db.tab format, if applicable.
         */
        currDB = extractDatabase((ASTNode) ast.getChild(0));
        break;
      case HiveParser.TOK_DROPTABLE:
      case HiveParser.TOK_DROPVIEW:
      case HiveParser.TOK_SHOW_CREATETABLE:
      case HiveParser.TOK_ALTERTABLE_SERIALIZER:
      case HiveParser.TOK_ALTERVIEW_ADDPARTS:
      case HiveParser.TOK_ALTERVIEW_DROPPARTS:
      case HiveParser.TOK_ALTERVIEW_PROPERTIES:
      case HiveParser.TOK_ALTERVIEW_RENAME:
      case HiveParser.TOK_CREATEINDEX:
      case HiveParser.TOK_DROPINDEX:
      case HiveParser.TOK_LOCKTABLE:
      case HiveParser.TOK_UNLOCKTABLE:
        currTab = extractTable((ASTNode) ast.getFirstChildWithType(HiveParser.TOK_TABNAME));
        currDB = extractDatabase((ASTNode) ast.getChild(0));
        break;
      case HiveParser.TOK_ALTERINDEX_REBUILD:
        currTab = extractTable((ASTNode) ast.getChild(0)); // type is not TOK_TABNAME
        currDB = extractDatabase((ASTNode) ast.getChild(0));
        break;
      case HiveParser.TOK_SHOW_TABLESTATUS:
        currDB = extractDatabase((ASTNode) ast.getChild(0));
        int children = ast.getChildCount();
        for (int i = 1; i < children; i++) {
          ASTNode child = (ASTNode) ast.getChild(i);
          if (child.getToken().getType() == HiveParser.Identifier) {
            currDB = new Database(child.getText());
            break;
          }
        }
        // Loosening the requested privileges for possible wildcard tables, since
        // further authorization will be done at the filter step and the unwanted ones
        // will eventually be filtered out of the output.
        currTab = Table.ALL;
        break;
      case HiveParser.TOK_ALTERTABLE_RENAME:
      case HiveParser.TOK_ALTERTABLE_PROPERTIES:
      case HiveParser.TOK_ALTERTABLE_DROPPARTS:
      case HiveParser.TOK_ALTERTABLE_RENAMECOL:
      case HiveParser.TOK_ALTERTABLE_ADDCOLS:
      case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
      case HiveParser.TOK_SHOW_TBLPROPERTIES:
      case HiveParser.TOK_SHOWINDEXES:
      case HiveParser.TOK_SHOWPARTITIONS:
        // token name TOK_TABNAME is not properly set in this case
        currTab = extractTable((ASTNode) ast.getChild(0));
        currDB = extractDatabase((ASTNode) ast.getChild(0));
        break;
      case HiveParser.TOK_MSCK:
        // token name TOK_TABNAME is not properly set in this case and child(0) does
        // not contain the table name.
        // TODO: Fix Hive to capture the table and DB name
        currOutTab = extractTable((ASTNode) ast.getChild(1));
        currOutDB = extractDatabase((ASTNode) ast.getChild(0));
        break;
      case HiveParser.TOK_ALTERTABLE_ADDPARTS:
        /*
         * Compiler doesn't create read/write entities for this operation.
         * Hence we need to extract the db name from the db.tab format, if applicable.
         */
        currTab = extractTable((ASTNode) ast.getChild(0));
        currDB = extractDatabase((ASTNode) ast.getChild(0));
        partitionURI = extractPartition(ast);
        break;
      case HiveParser.TOK_CREATEFUNCTION:
        String udfClassName = BaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
        try {
          CodeSource udfSrc =
              Class.forName(udfClassName, true, Utilities.getSessionSpecifiedClassLoader())
                  .getProtectionDomain()
                  .getCodeSource();
          if (udfSrc == null) {
            throw new SemanticException("Could not resolve the jar for UDF class " + udfClassName);
          }
          String udfJar = udfSrc.getLocation().getPath();
          if (udfJar == null || udfJar.isEmpty()) {
            throw new SemanticException(
                "Could not find the jar for UDF class " + udfClassName + " to validate privileges");
          }
          udfURI = parseURI(udfSrc.getLocation().toString(), true);
        } catch (ClassNotFoundException e) {
          throw new SemanticException("Error retrieving udf class:" + e.getMessage(), e);
        }
        // create/drop function is allowed with any database
        currDB = Database.ALL;
        break;
      case HiveParser.TOK_DROPFUNCTION:
        // create/drop function is allowed with any database
        currDB = Database.ALL;
        break;

      case HiveParser.TOK_LOAD:
        String dbName =
            BaseSemanticAnalyzer.unescapeIdentifier(
                ast.getChild(1).getChild(0).getChild(0).getText());
        currDB = new Database(dbName);
        break;
      case HiveParser.TOK_DESCTABLE:
        currDB = getCanonicalDb();
        // For DESCRIBE FORMATTED/EXTENDED ast will have an additional child node with value
        // "FORMATTED/EXTENDED".
        isDescTableBasic = (ast.getChildCount() == 1);
        break;
      case HiveParser.TOK_TRUNCATETABLE:
        // SENTRY-826:
        // Truncate empty partitioned table should throw SemanticException only if the
        // user does not have permission.
        // In postAnalyze, currOutDB and currOutTbl will be added into outputHierarchy
        // which will be validated in the hiveAuthzBinding.authorize method.
        Preconditions.checkArgument(ast.getChildCount() == 1);
        // child count is 1 for a table without partitions, 2 for a table with partitions
        Preconditions.checkArgument(ast.getChild(0).getChildCount() >= 1);
        Preconditions.checkArgument(ast.getChild(0).getChild(0).getChildCount() == 1);
        currOutDB = extractDatabase((ASTNode) ast.getChild(0));
        currOutTab = extractTable((ASTNode) ast.getChild(0).getChild(0).getChild(0));
        break;
      default:
        currDB = getCanonicalDb();
        break;
    }
    return ast;
  }
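
The TOK_CREATEFUNCTION branch resolves the UDF's jar through the class's ProtectionDomain. A minimal standalone sketch of that idiom (hypothetical class name, default classloader instead of Hive's session classloader):

import java.security.CodeSource;

public class JarLocator {
  // Returns the URL of the code source the class was loaded from,
  // or null when there is none (e.g. bootstrap-loaded JDK classes).
  static String jarOf(String className) throws ClassNotFoundException {
    CodeSource src = Class.forName(className).getProtectionDomain().getCodeSource();
    return src == null ? null : src.getLocation().toString();
  }

  public static void main(String[] args) throws Exception {
    // For a class loaded from a jar or class directory this prints its location.
    System.out.println(jarOf(JarLocator.class.getName()));
    // Bootstrap-loaded JDK classes have no CodeSource:
    System.out.println(jarOf("java.lang.String")); // prints null
  }
}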
Example #12
 private ASTNode getQueryASTNode(ASTNode node) {
   while (node != null && node.getType() != HiveParser.TOK_QUERY) {
     node = (ASTNode) node.getParent();
   }
   return node;
 }
Example #13
 public void addExprToAlias(ASTNode expr, ASTNode alias) {
   exprToAlias.put(HQLParser.getString(expr).trim(), alias.getText().toLowerCase());
 }
Example #14
  void addRewrites(
      TokenRewriteStream stream,
      QBSubQuery sq,
      String program,
      PrintStream out,
      String qbAlias,
      boolean isWhere,
      StringBuilder addedJoins) {
    ASTNode sqNode = sq.getOriginalSubQueryASTForRewrite();
    ASTNode tokQry = getQueryASTNode(sqNode);
    ASTNode tokInsert = (ASTNode) tokQry.getChild(1);
    ASTNode tokWhere = null;

    for (int i = 0; i < tokInsert.getChildCount(); i++) {
      if (tokInsert.getChild(i).getType() == HiveParser.TOK_WHERE) {
        tokWhere = (ASTNode) tokInsert.getChild(i);
        break;
      }
    }

    SubQueryDiagnostic.QBSubQueryRewrite diag = sq.getDiagnostic();
    String sqStr = diag.getRewrittenQuery();
    String joinCond = diag.getJoiningCondition();

    /*
     * The SubQuery predicate has been hoisted into a Join. The SubQuery predicate is replaced
     * by a 'true' predicate (1 = 1) in the outer QB's where/having clause.
     */
    stream.replace(program, sqNode.getTokenStartIndex(), sqNode.getTokenStopIndex(), "1 = 1");

    String sqJoin = " " + getJoinKeyWord(sq) + " " + sqStr + " " + joinCond;
    addedJoins.append(" ").append(sqJoin);

    String postJoinCond = diag.getOuterQueryPostJoinCond();
    if (postJoinCond != null) {
      stream.insertAfter(program, tokWhere.getTokenStopIndex(), " and " + postJoinCond);
    }

    String qualifier = isWhere ? "Where Clause " : "Having Clause ";
    if (qbAlias != null) {
      qualifier = qualifier + "for Query Block '" + qbAlias + "' ";
    }
    out.println(String.format("\n%s Rewritten SubQuery:\n%s", qualifier, diag.getRewrittenQuery()));
    out.println(
        String.format(
            "\n%s SubQuery Joining Condition:\n%s", qualifier, diag.getJoiningCondition()));
  }
Example #15
  public void print() {
    if (!log.isDebugEnabled()) {
      return;
    }
    StringBuilder builder = new StringBuilder();
    builder.append("ASTNode:" + ast.dump() + "\n");
    builder.append("QB:");
    builder.append("\n numJoins:" + qb.getNumJoins());
    builder.append("\n numGbys:" + qb.getNumGbys());
    builder.append("\n numSels:" + qb.getNumSels());
    builder.append("\n numSelDis:" + qb.getNumSelDi());
    builder.append("\n aliasToTabs:");
    Set<String> tabAliases = qb.getTabAliases();
    for (String alias : tabAliases) {
      builder.append("\n\t" + alias + ":" + qb.getTabNameForAlias(alias));
    }
    builder.append("\n aliases:");
    for (String alias : qb.getAliases()) {
      builder.append(alias);
      builder.append(", ");
    }
    builder.append("id:" + qb.getId());
    builder.append("isQuery:" + qb.getIsQuery());
    builder.append("\n QBParseInfo");
    QBParseInfo parseInfo = qb.getParseInfo();
    builder.append("\n isSubQ: " + parseInfo.getIsSubQ());
    builder.append("\n alias: " + parseInfo.getAlias());
    if (parseInfo.getJoinExpr() != null) {
      builder.append("\n joinExpr: " + parseInfo.getJoinExpr().dump());
    }
    builder.append("\n hints: " + parseInfo.getHints());
    builder.append("\n aliasToSrc: ");
    for (String alias : tabAliases) {
      builder.append("\n\t" + alias + ": " + parseInfo.getSrcForAlias(alias).dump());
    }
    TreeSet<String> clauses = new TreeSet<String>(parseInfo.getClauseNames());
    for (String clause : clauses) {
      builder.append("\n\t" + clause + ": " + parseInfo.getClauseNamesForDest());
    }
    String clause = clauses.first();
    if (parseInfo.getWhrForClause(clause) != null) {
      builder.append("\n whereexpr: " + parseInfo.getWhrForClause(clause).dump());
    }
    if (parseInfo.getGroupByForClause(clause) != null) {
      builder.append("\n groupby expr: " + parseInfo.getGroupByForClause(clause).dump());
    }
    if (parseInfo.getSelForClause(clause) != null) {
      builder.append("\n sel expr: " + parseInfo.getSelForClause(clause).dump());
    }
    if (parseInfo.getHavingForClause(clause) != null) {
      builder.append("\n having expr: " + parseInfo.getHavingForClause(clause).dump());
    }
    if (parseInfo.getDestLimit(clause) != null) {
      builder.append("\n limit: " + parseInfo.getDestLimit(clause));
    }
    if (parseInfo.getAllExprToColumnAlias() != null
        && !parseInfo.getAllExprToColumnAlias().isEmpty()) {
      builder.append("\n exprToColumnAlias:");
      for (Map.Entry<ASTNode, String> entry : parseInfo.getAllExprToColumnAlias().entrySet()) {
        builder.append("\n\t expr: " + entry.getKey().dump() + " ColumnAlias: " + entry.getValue());
      }
    }
    if (parseInfo.getAggregationExprsForClause(clause) != null) {
      builder.append("\n aggregateexprs:");
      for (Map.Entry<String, ASTNode> entry :
          parseInfo.getAggregationExprsForClause(clause).entrySet()) {
        builder.append("\n\t key: " + entry.getKey() + " expr: " + entry.getValue().dump());
      }
    }
    if (parseInfo.getDistinctFuncExprsForClause(clause) != null) {
      builder.append("\n distinctFuncExprs:");
      for (ASTNode entry : parseInfo.getDistinctFuncExprsForClause(clause)) {
        builder.append("\n\t expr: " + entry.dump());
      }
    }

    if (qb.getQbJoinTree() != null) {
      builder.append("\n\n JoinTree");
      QBJoinTree joinTree = qb.getQbJoinTree();
      printJoinTree(joinTree, builder);
    }

    if (qb.getParseInfo().getDestForClause(clause) != null) {
      builder.append("\n Destination:");
      builder.append("\n\t dest expr:" + qb.getParseInfo().getDestForClause(clause).dump());
    }
    log.debug(builder.toString());
  }
Example #16
  static ASTNode replaceAliases(ASTNode node, int nodePos, Map<String, String> colToTableAlias) {
    if (node == null) {
      return node;
    }

    int nodeType = node.getToken().getType();
    if (nodeType == HiveParser.TOK_TABLE_OR_COL || nodeType == HiveParser.DOT) {
      String colName = HQLParser.getColName(node);
      String newAlias = colToTableAlias.get(colName.toLowerCase());

      if (StringUtils.isBlank(newAlias)) {
        return node;
      }

      if (nodeType == HiveParser.DOT) {
        // No need to create a new node, just replace the table name ident
        ASTNode aliasNode = (ASTNode) node.getChild(0);
        ASTNode newAliasIdent = new ASTNode(new CommonToken(HiveParser.Identifier, newAlias));
        aliasNode.setChild(0, newAliasIdent);
      } else {
        // Just a column ref, we need to make it alias.col
        // '.' will become the parent node
        ASTNode dot = new ASTNode(new CommonToken(HiveParser.DOT, "."));
        ASTNode aliasIdentNode = new ASTNode(new CommonToken(HiveParser.Identifier, newAlias));
        ASTNode tabRefNode =
            new ASTNode(new CommonToken(HiveParser.TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL"));

        tabRefNode.addChild(aliasIdentNode);
        dot.addChild(tabRefNode);

        ASTNode colIdentNode = new ASTNode(new CommonToken(HiveParser.Identifier, colName));
        dot.addChild(colIdentNode);

        ASTNode parent = (ASTNode) node.getParent();
        if (parent != null) {
          parent.setChild(nodePos, dot);
        } else {
          return dot;
        }
      }
    } else {
      // recurse down
      for (int i = 0; i < node.getChildCount(); i++) {
        ASTNode child = (ASTNode) node.getChild(i);
        replaceAliases(child, i, colToTableAlias);
      }
    }
    return node;
  }
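
A standalone sketch of the alias.col subtree that replaceAliases splices in for a bare column reference, using plain ANTLR CommonTree and stand-in token-type constants instead of HiveParser.* and Hive's ASTNode:

import org.antlr.runtime.CommonToken;
import org.antlr.runtime.tree.CommonTree;

public class QualifyColumnDemo {
  // Stand-in token types; the real code uses HiveParser.DOT etc.
  static final int DOT = 1, TOK_TABLE_OR_COL = 2, IDENTIFIER = 3;

  public static void main(String[] args) {
    // DOT(TOK_TABLE_OR_COL(Identifier 'fact'), Identifier 'sales'),
    // i.e. the alias.col shape that replaces a bare column node.
    CommonTree dot = new CommonTree(new CommonToken(DOT, "."));
    CommonTree tabRef =
        new CommonTree(new CommonToken(TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL"));
    tabRef.addChild(new CommonTree(new CommonToken(IDENTIFIER, "fact")));
    dot.addChild(tabRef);
    dot.addChild(new CommonTree(new CommonToken(IDENTIFIER, "sales")));

    System.out.println(dot.toStringTree()); // (. (TOK_TABLE_OR_COL fact) sales)
  }
}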
Example #17
  @Override
  public void postAnalyze(
      HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks)
      throws SemanticException {

    try {

      switch (ast.getToken().getType()) {
        case HiveParser.TOK_CREATETABLE:
        case HiveParser.TOK_CREATEDATABASE:
        case HiveParser.TOK_ALTERTABLE_PARTITION:

          // HCat will allow these operations to be performed.
          // Database DDL
        case HiveParser.TOK_SHOWDATABASES:
        case HiveParser.TOK_DROPDATABASE:
        case HiveParser.TOK_SWITCHDATABASE:
        case HiveParser.TOK_DESCDATABASE:
        case HiveParser.TOK_ALTERDATABASE_PROPERTIES:

          // Index DDL
        case HiveParser.TOK_ALTERINDEX_PROPERTIES:
        case HiveParser.TOK_CREATEINDEX:
        case HiveParser.TOK_DROPINDEX:
        case HiveParser.TOK_SHOWINDEXES:

          // View DDL
          // case HiveParser.TOK_ALTERVIEW_ADDPARTS:
        case HiveParser.TOK_ALTERVIEW_DROPPARTS:
        case HiveParser.TOK_ALTERVIEW_PROPERTIES:
        case HiveParser.TOK_ALTERVIEW_RENAME:
        case HiveParser.TOK_CREATEVIEW:
        case HiveParser.TOK_DROPVIEW:

          // Authorization DDL
        case HiveParser.TOK_CREATEROLE:
        case HiveParser.TOK_DROPROLE:
        case HiveParser.TOK_GRANT_ROLE:
        case HiveParser.TOK_GRANT_WITH_OPTION:
        case HiveParser.TOK_GRANT:
        case HiveParser.TOK_REVOKE_ROLE:
        case HiveParser.TOK_REVOKE:
        case HiveParser.TOK_SHOW_GRANT:
        case HiveParser.TOK_SHOW_ROLE_GRANT:

          // Misc DDL
        case HiveParser.TOK_LOCKTABLE:
        case HiveParser.TOK_UNLOCKTABLE:
        case HiveParser.TOK_SHOWLOCKS:
        case HiveParser.TOK_DESCFUNCTION:
        case HiveParser.TOK_SHOWFUNCTIONS:
        case HiveParser.TOK_EXPLAIN:

          // Table DDL
        case HiveParser.TOK_ALTERTABLE_ADDPARTS:
        case HiveParser.TOK_ALTERTABLE_ADDCOLS:
        case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
        case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
        case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
        case HiveParser.TOK_ALTERTABLE_DROPPARTS:
        case HiveParser.TOK_ALTERTABLE_PROPERTIES:
        case HiveParser.TOK_ALTERTABLE_RENAME:
        case HiveParser.TOK_ALTERTABLE_RENAMECOL:
        case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
        case HiveParser.TOK_ALTERTABLE_SERIALIZER:
        case HiveParser.TOK_ALTERTABLE_TOUCH:
        case HiveParser.TOK_DESCTABLE:
        case HiveParser.TOK_DROPTABLE:
        case HiveParser.TOK_SHOW_TABLESTATUS:
        case HiveParser.TOK_SHOWPARTITIONS:
        case HiveParser.TOK_SHOWTABLES:
          break;

        default:
          throw new HCatException(
              ErrorType.ERROR_INTERNAL_EXCEPTION, "Unexpected token: " + ast.getToken());
      }

      authorizeDDL(context, rootTasks);

    } catch (HCatException e) {
      throw new SemanticException(e);
    } catch (HiveException e) {
      throw new SemanticException(e);
    }

    if (hook != null) {
      hook.postAnalyze(context, rootTasks);
    }
  }
Example #18
  @Override
  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
      throws SemanticException {

    this.ast = ast;
    switch (ast.getToken().getType()) {

        // HCat wants to intercept the following tokens and special-handle them.
      case HiveParser.TOK_CREATETABLE:
        hook = new CreateTableHook();
        return hook.preAnalyze(context, ast);

      case HiveParser.TOK_CREATEDATABASE:
        hook = new CreateDatabaseHook();
        return hook.preAnalyze(context, ast);

      case HiveParser.TOK_ALTERTABLE_PARTITION:
        if (((ASTNode) ast.getChild(1)).getToken().getType()
            == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
          return ast;
        } else if (((ASTNode) ast.getChild(1)).getToken().getType()
            == HiveParser.TOK_ALTERTABLE_MERGEFILES) {
          // unsupported
          throw new SemanticException("Operation not supported.");
        } else {
          return ast;
        }

        // HCat will allow these operations to be performed.
        // Database DDL
      case HiveParser.TOK_SHOWDATABASES:
      case HiveParser.TOK_DROPDATABASE:
      case HiveParser.TOK_SWITCHDATABASE:
      case HiveParser.TOK_DESCDATABASE:
      case HiveParser.TOK_ALTERDATABASE_PROPERTIES:

        // Index DDL
      case HiveParser.TOK_ALTERINDEX_PROPERTIES:
      case HiveParser.TOK_CREATEINDEX:
      case HiveParser.TOK_DROPINDEX:
      case HiveParser.TOK_SHOWINDEXES:

        // View DDL
        // "alter view add partition" does not work because of the nature of implementation
        // of the DDL in hive. Hive will internally invoke another Driver on the select statement,
        // and HCat does not let "select" statement through. I cannot find a way to get around it
        // without modifying hive code. So just leave it unsupported.
        // case HiveParser.TOK_ALTERVIEW_ADDPARTS:
      case HiveParser.TOK_ALTERVIEW_DROPPARTS:
      case HiveParser.TOK_ALTERVIEW_PROPERTIES:
      case HiveParser.TOK_ALTERVIEW_RENAME:
      case HiveParser.TOK_CREATEVIEW:
      case HiveParser.TOK_DROPVIEW:

        // Authorization DDL
      case HiveParser.TOK_CREATEROLE:
      case HiveParser.TOK_DROPROLE:
      case HiveParser.TOK_GRANT_ROLE:
      case HiveParser.TOK_GRANT_WITH_OPTION:
      case HiveParser.TOK_GRANT:
      case HiveParser.TOK_REVOKE_ROLE:
      case HiveParser.TOK_REVOKE:
      case HiveParser.TOK_SHOW_GRANT:
      case HiveParser.TOK_SHOW_ROLE_GRANT:

        // Misc DDL
      case HiveParser.TOK_LOCKTABLE:
      case HiveParser.TOK_UNLOCKTABLE:
      case HiveParser.TOK_SHOWLOCKS:
      case HiveParser.TOK_DESCFUNCTION:
      case HiveParser.TOK_SHOWFUNCTIONS:
      case HiveParser.TOK_EXPLAIN:

        // Table DDL
      case HiveParser.TOK_ALTERTABLE_ADDPARTS:
      case HiveParser.TOK_ALTERTABLE_ADDCOLS:
      case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
      case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
      case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
      case HiveParser.TOK_ALTERTABLE_DROPPARTS:
      case HiveParser.TOK_ALTERTABLE_PROPERTIES:
      case HiveParser.TOK_ALTERTABLE_RENAME:
      case HiveParser.TOK_ALTERTABLE_RENAMECOL:
      case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
      case HiveParser.TOK_ALTERTABLE_SERIALIZER:
      case HiveParser.TOK_ALTERTABLE_TOUCH:
      case HiveParser.TOK_DESCTABLE:
      case HiveParser.TOK_DROPTABLE:
      case HiveParser.TOK_SHOW_TABLESTATUS:
      case HiveParser.TOK_SHOWPARTITIONS:
      case HiveParser.TOK_SHOWTABLES:
        return ast;

        // In all other cases, throw an exception: it's a whitelist of allowed operations.
      default:
        throw new SemanticException("Operation not supported.");
    }
  }
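
Both hooks above express the whitelist as a long fall-through switch. An equivalent formulation (hypothetical, not the HCatalog code) keeps the allowed token types in a Set, which makes the membership test explicit:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class DdlWhitelist {
  // Stand-in token constants; the real hook switches on HiveParser.TOK_* values.
  static final int TOK_SHOWDATABASES = 1, TOK_DROPTABLE = 2, TOK_SHOWTABLES = 3;

  private static final Set<Integer> ALLOWED =
      new HashSet<Integer>(Arrays.asList(TOK_SHOWDATABASES, TOK_DROPTABLE, TOK_SHOWTABLES));

  static void check(int tokenType) {
    // Mirrors the default branch above: anything off the whitelist is rejected.
    if (!ALLOWED.contains(tokenType)) {
      throw new IllegalArgumentException("Operation not supported.");
    }
  }

  public static void main(String[] args) {
    check(TOK_SHOWTABLES); // on the whitelist: passes silently
    try {
      check(99); // off the whitelist
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage()); // Operation not supported.
    }
  }
}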