/** Raises an authorization error if the given query uses TRANSFORM scripts without privilege. */
  @Override
  public void run(HookContext hookContext) throws Exception {
    HiveAuthzBinding hiveAuthzBinding = HiveAuthzBinding.get(hookContext.getConf());
    try {
      QueryPlan plan = hookContext.getQueryPlan();
      // Nothing to check when there is no plan or no query properties.
      if (plan == null || plan.getQueryProperties() == null) {
        return;
      }
      // Only queries that use a script (TRANSFORM/MAP/REDUCE ... USING) need this check.
      if (!plan.getQueryProperties().usesScript()) {
        return;
      }
      if (hiveAuthzBinding == null) {
        LOG.warn("No authorization binding found, skipping the authorization for transform");
        return;
      }
      // TRANSFORM is authorized at server scope: the input hierarchy contains only the
      // authorization server, and no output hierarchy is required.
      List<DBModelAuthorizable> serverScope = new ArrayList<DBModelAuthorizable>();
      serverScope.add(hiveAuthzBinding.getAuthServer());
      List<List<DBModelAuthorizable>> inputs = new ArrayList<List<DBModelAuthorizable>>();
      inputs.add(serverScope);
      List<List<DBModelAuthorizable>> outputs = new ArrayList<List<DBModelAuthorizable>>();
      hiveAuthzBinding.authorize(
          HiveOperation.QUERY,
          HiveAuthzPrivilegesMap.getHiveExtendedAuthzPrivileges(HiveExtendedOperation.TRANSFORM),
          new Subject(hookContext.getUserName()),
          inputs,
          outputs);
    } finally {
      // Always release the per-query binding state, even on authorization failure.
      if (hiveAuthzBinding != null) {
        hiveAuthzBinding.clear(hookContext.getConf());
      }
    }
  }
  /**
   * Filters a SHOW COLUMNS result down to the columns the given user is authorized to see.
   *
   * <p>Each column is authorized individually against the SHOWCOLUMNS privilege set using a
   * privilege-cached binding; columns that fail authorization are silently omitted rather than
   * raising an error.
   *
   * @param hiveAuthzBinding binding used to resolve the authorization server and build the cache
   * @param cols candidate columns reported by the metastore
   * @param operation the Hive operation being authorized
   * @param userName name of the requesting user
   * @param tableName table whose columns are being listed
   * @param dbName database containing the table
   * @return the subset of {@code cols} visible to the user; possibly empty, never null
   * @throws SemanticException if the privilege-cached binding cannot be created
   */
  public static List<FieldSchema> filterShowColumns(
      HiveAuthzBinding hiveAuthzBinding,
      List<FieldSchema> cols,
      HiveOperation operation,
      String userName,
      String tableName,
      String dbName)
      throws SemanticException {
    List<FieldSchema> filteredResult = new ArrayList<FieldSchema>();
    Subject subject = new Subject(userName);
    HiveAuthzPrivileges columnMetaDataPrivilege =
        HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
    // Privilege-cached binding avoids re-fetching privileges for every column checked below.
    HiveAuthzBinding hiveBindingWithPrivilegeCache =
        getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);

    // The server -> database -> table prefix of the hierarchy is the same for every column;
    // build it once and copy it per column instead of rebuilding it each iteration.
    List<DBModelAuthorizable> baseHierarchy = new ArrayList<DBModelAuthorizable>();
    baseHierarchy.add(hiveAuthzBinding.getAuthServer());
    baseHierarchy.add(new Database(dbName));
    baseHierarchy.add(new Table(tableName));

    for (FieldSchema col : cols) {
      // If the user has privileges on the column, add it to the filtered list; else discard it.
      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
      List<DBModelAuthorizable> columnHierarchy =
          new ArrayList<DBModelAuthorizable>(baseHierarchy);
      columnHierarchy.add(new Column(col.getName()));
      inputHierarchy.add(columnHierarchy);

      try {
        // Authorize with the privilege-cached binding.
        hiveBindingWithPrivilegeCache.authorize(
            operation, columnMetaDataPrivilege, subject, inputHierarchy, outputHierarchy);
        filteredResult.add(col);
      } catch (AuthorizationException e) {
        // Expected when the user lacks privileges on this column: swallow the exception and
        // simply leave the column out of the filtered result.
      }
    }
    return filteredResult;
  }
  /**
   * Converts the semantic analyzer's input/output entities into Sentry authorizables and invokes
   * the Hive binding to validate permissions.
   *
   * <p>For operation scopes where the compiler does not capture entities (database-level and
   * metadata operations, UDF registration, USE &lt;db&gt;), the hierarchies are synthesized from
   * state captured during pre-analysis (e.g. {@code currDB}, {@code currTab}, {@code partitionURI},
   * {@code udfURI} — presumably set by the pre-analyze hook; confirm against the rest of the
   * class).
   *
   * @param context semantic analyzer hook context carrying inputs, outputs, and the user
   * @param stmtAuthObject privileges required by the statement; may be replaced with SHOWCOLUMNS
   *     privileges for a basic DESCRIBE (see below)
   * @param stmtOperation the Hive operation being authorized
   * @throws AuthorizationException if the user lacks the required privileges, or the operation
   *     scope is unknown
   */
  private void authorizeWithHiveBindings(
      HiveSemanticAnalyzerHookContext context,
      HiveAuthzPrivileges stmtAuthObject,
      HiveOperation stmtOperation)
      throws AuthorizationException {
    Set<ReadEntity> inputs = context.getInputs();
    Set<WriteEntity> outputs = context.getOutputs();
    List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
    List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();

    if (LOG.isDebugEnabled()) {
      LOG.debug("stmtAuthObject.getOperationScope() = " + stmtAuthObject.getOperationScope());
      LOG.debug("context.getInputs() = " + context.getInputs());
      LOG.debug("context.getOutputs() = " + context.getOutputs());
    }

    // Workaround to allow DESCRIBE <table> to be executed with only column-level privileges,
    // while still authorizing DESCRIBE [EXTENDED|FORMATTED] as table-level. This is done by
    // treating DESCRIBE <table> the same as SHOW COLUMNS, which only requires column-level privs.
    if (isDescTableBasic) {
      stmtAuthObject = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
    }

    switch (stmtAuthObject.getOperationScope()) {
      case SERVER:
        // Validate server-level privileges if applicable, e.g. CREATE UDF, ADD/register JAR.
        List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
        serverHierarchy.add(hiveAuthzBinding.getAuthServer());
        inputHierarchy.add(serverHierarchy);
        break;
      case DATABASE:
        // Workaround for database-scope statements (CREATE/ALTER/DROP DATABASE): the compiler
        // does not emit entities, so check server -> currDB on both input and output.
        List<DBModelAuthorizable> dbHierarchy = new ArrayList<DBModelAuthorizable>();
        dbHierarchy.add(hiveAuthzBinding.getAuthServer());
        dbHierarchy.add(currDB);
        inputHierarchy.add(dbHierarchy);
        outputHierarchy.add(dbHierarchy);

        getInputHierarchyFromInputs(inputHierarchy, inputs);
        break;
      case TABLE:
        // Workaround for ADD PARTITION: authorize the partition location URI when present.
        if (partitionURI != null) {
          inputHierarchy.add(ImmutableList.of(hiveAuthzBinding.getAuthServer(), partitionURI));
        }

        getInputHierarchyFromInputs(inputHierarchy, inputs);
        for (WriteEntity writeEntity : outputs) {
          // Some write entities are not subject to authorization (see filterWriteEntity).
          if (filterWriteEntity(writeEntity)) {
            continue;
          }
          List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
          entityHierarchy.add(hiveAuthzBinding.getAuthServer());
          entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
          outputHierarchy.add(entityHierarchy);
        }
        // Workaround for metadata queries:
        // the table name captured in pre-analyze is included in the input entity list.
        if (currTab != null) {
          List<DBModelAuthorizable> externalAuthorizableHierarchy =
              new ArrayList<DBModelAuthorizable>();
          externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
          externalAuthorizableHierarchy.add(currDB);
          externalAuthorizableHierarchy.add(currTab);
          inputHierarchy.add(externalAuthorizableHierarchy);
        }

        // Workaround for DDL statements:
        // the output table captured in pre-analyze is included in the output entity list.
        if (currOutTab != null) {
          List<DBModelAuthorizable> externalAuthorizableHierarchy =
              new ArrayList<DBModelAuthorizable>();
          externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
          externalAuthorizableHierarchy.add(currOutDB);
          externalAuthorizableHierarchy.add(currOutTab);
          outputHierarchy.add(externalAuthorizableHierarchy);
        }
        break;
      case FUNCTION:
        /* The 'FUNCTION' privilege scope currently used for
         *  - CREATE TEMP FUNCTION
         *  - DROP TEMP FUNCTION.
         */
        if (udfURI != null) {
          List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>();
          udfUriHierarchy.add(hiveAuthzBinding.getAuthServer());
          udfUriHierarchy.add(udfURI);
          inputHierarchy.add(udfUriHierarchy);
          for (WriteEntity writeEntity : outputs) {
            List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
            entityHierarchy.add(hiveAuthzBinding.getAuthServer());
            entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
            outputHierarchy.add(entityHierarchy);
          }
        }
        break;
      case CONNECT:
        /* The 'CONNECT' is an implicit privilege scope currently used for
         *  - USE <db>
         *  It's allowed when the user has any privilege on the current database. For application
         *  backward compatibility, we allow (optional) implicit connect permission on 'default' db.
         */
        List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>();
        connectHierarchy.add(hiveAuthzBinding.getAuthServer());
        // By default, allow connect access to the default db unless restricted by config.
        Table currTbl = Table.ALL;
        Column currCol = Column.ALL;
        if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDB.getName())
            && "false"
                .equalsIgnoreCase(
                    authzConf.get(
                        HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
          // NOTE(review): this reassigns the instance field currDB (not a local) — any code
          // running after this method sees Database.ALL; confirm this is intended.
          currDB = Database.ALL;
          currTbl = Table.SOME;
        }

        connectHierarchy.add(currDB);
        connectHierarchy.add(currTbl);
        connectHierarchy.add(currCol);

        inputHierarchy.add(connectHierarchy);
        outputHierarchy.add(connectHierarchy);
        break;
      case COLUMN:
        for (ReadEntity readEntity : inputs) {
          if (readEntity.getAccessedColumns() != null
              && !readEntity.getAccessedColumns().isEmpty()) {
            // Specific columns were accessed: authorize each of them.
            addColumnHierarchy(inputHierarchy, readEntity);
          } else {
            // No column list captured: require access to all columns of the entity.
            List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
            entityHierarchy.add(hiveAuthzBinding.getAuthServer());
            entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
            entityHierarchy.add(Column.ALL);
            inputHierarchy.add(entityHierarchy);
          }
        }
        break;
      default:
        throw new AuthorizationException(
            "Unknown operation scope type " + stmtAuthObject.getOperationScope().toString());
    }

    HiveAuthzBinding binding = null;
    try {
      binding = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, context.getUserName());
    } catch (SemanticException e) {
      // Cache creation failed; fall back to the original (uncached) hiveAuthzBinding.
      binding = hiveAuthzBinding;
    }
    // Validate permission.
    binding.authorize(
        stmtOperation, stmtAuthObject, getCurrentSubject(context), inputHierarchy, outputHierarchy);
  }
  /**
   * Post-analyze hook that invokes the Hive authorization bindings.
   *
   * <p>Wires Sentry state into any {@code SentryGrantRevokeTask}s, wraps {@code DDLTask}s with
   * {@code SentryFilterDDLTask} so metadata results are filtered by privilege (SENTRY-847), and
   * then authorizes the statement. On authorization failure the required privileges are published
   * to the session conf and surfaced to the client via a {@link SemanticException}.
   *
   * @param context semantic analyzer hook context
   * @param rootTasks root tasks produced by compilation; DDL tasks may be replaced in place
   * @throws SemanticException when authorization fails, or when mock compilation is enabled
   */
  @Override
  public void postAnalyze(
      HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks)
      throws SemanticException {
    HiveOperation stmtOperation = getCurrentHiveStmtOp();
    HiveAuthzPrivileges stmtAuthObject =
        HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(stmtOperation);

    // Must occur above the null check on stmtAuthObject, since GRANT/REVOKE/etc. are not
    // authorized by the binding layer at present but still need the Sentry task wiring below.
    Subject subject = getCurrentSubject(context);
    Set<String> subjectGroups = hiveAuthzBinding.getGroups(subject);
    for (Task<? extends Serializable> task : rootTasks) {
      if (task instanceof SentryGrantRevokeTask) {
        SentryGrantRevokeTask sentryTask = (SentryGrantRevokeTask) task;
        sentryTask.setHiveAuthzBinding(hiveAuthzBinding);
        sentryTask.setAuthzConf(authzConf);
        sentryTask.setSubject(subject);
        sentryTask.setSubjectGroups(subjectGroups);
        sentryTask.setIpAddress(context.getIpAddress());
        sentryTask.setOperation(stmtOperation);
      }
    }

    try {
      if (stmtAuthObject == null) {
        // We don't handle authorizing this statement
        return;
      }

      /**
       * Replace DDLTask with SentryFilterDDLTask for protection, e.g. "show columns" only lists
       * the columns the user can access. SENTRY-847
       */
      for (int i = 0; i < rootTasks.size(); i++) {
        Task<? extends Serializable> task = rootTasks.get(i);
        if (task instanceof DDLTask) {
          SentryFilterDDLTask filterTask =
              new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation);
          filterTask.setWork((DDLWork) task.getWork());
          rootTasks.set(i, filterTask);
        }
      }

      authorizeWithHiveBindings(context, stmtAuthObject, stmtOperation);
    } catch (AuthorizationException e) {
      executeOnFailureHooks(context, stmtOperation, e);
      // Build "perm1;perm2;..." with a StringBuilder rather than String concatenation in a loop.
      StringBuilder permsBuilder = new StringBuilder();
      for (String perm : hiveAuthzBinding.getLastQueryPrivilegeErrors()) {
        permsBuilder.append(perm).append(';');
      }
      String permsRequired = permsBuilder.toString();
      SessionState.get().getConf().set(HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS, permsRequired);
      String msgForLog =
          HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE
              + "\n Required privileges for this query: "
              + permsRequired;
      String msgForConsole =
          HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE
              + "\n "
              + e.getMessage()
              + "\n The required privileges: "
              + permsRequired;
      // AuthorizationException is not a real exception, use the info level to record this.
      LOG.info(msgForLog);
      throw new SemanticException(msgForConsole, e);
    } finally {
      hiveAuthzBinding.close();
    }

    // Mock compilation mode: abort every query after authorization, for testing.
    if ("true"
        .equalsIgnoreCase(context.getConf().get(HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION))) {
      throw new SemanticException(
          HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR
              + " Mock query compilation aborted. Set "
              + HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION
              + " to 'false' for normal query processing");
    }
  }
  /**
   * Check if the current user has privileges to perform the given operation type hiveOpType on the
   * given input and output objects.
   *
   * <p>Basic {@code DESCRIBE <table>} (without EXTENDED/FORMATTED) is authorized with SHOWCOLUMNS
   * privileges so column-level grants suffice; all other operations use their mapped privileges.
   *
   * @param hiveOpType the Hive operation type being checked
   * @param inputHObjs objects read by the operation
   * @param outputHObjs objects written by the operation
   * @param context authorization context (command string, IP address, ...)
   * @throws HiveAccessControlException if the user lacks the required privileges
   * @throws HiveAuthzPluginException on any non-authorization failure
   */
  @Override
  public void checkPrivileges(
      HiveOperationType hiveOpType,
      List<HivePrivilegeObject> inputHObjs,
      List<HivePrivilegeObject> outputHObjs,
      HiveAuthzContext context)
      throws HiveAuthzPluginException, HiveAccessControlException {
    if (LOG.isDebugEnabled()) {
      String msg =
          "Checking privileges for operation "
              + hiveOpType
              + " by user "
              + authenticator.getUserName()
              + " on "
              + " input objects "
              + inputHObjs
              + " and output objects "
              + outputHObjs
              + ". Context Info: "
              + context;
      LOG.debug(msg);
    }

    HiveOperation hiveOp = SentryAuthorizerUtil.convert2HiveOperation(hiveOpType.name());
    HiveAuthzPrivileges stmtAuthPrivileges = null;
    // Basic DESCRIBE only needs column-level privileges; EXTENDED/FORMATTED stays table-level.
    if (HiveOperation.DESCTABLE.equals(hiveOp)
        && !(context.getCommandString().contains("EXTENDED")
            || context.getCommandString().contains("FORMATTED"))) {
      stmtAuthPrivileges = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
    } else {
      stmtAuthPrivileges = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(hiveOp);
    }

    HiveAuthzBinding hiveAuthzBinding = null;
    try {
      hiveAuthzBinding = getAuthzBinding();
      if (stmtAuthPrivileges == null) {
        // We don't handle authorizing this statement
        return;
      }

      List<List<DBModelAuthorizable>> inputHierarchyList =
          SentryAuthorizerUtil.convert2SentryPrivilegeList(
              hiveAuthzBinding.getAuthServer(), inputHObjs);
      List<List<DBModelAuthorizable>> outputHierarchyList =
          SentryAuthorizerUtil.convert2SentryPrivilegeList(
              hiveAuthzBinding.getAuthServer(), outputHObjs);

      // Workaround for metadata queries
      addExtendHierarchy(
          hiveOp,
          stmtAuthPrivileges,
          inputHierarchyList,
          outputHierarchyList,
          context.getCommandString(),
          hiveAuthzBinding);

      hiveAuthzBinding.authorize(
          hiveOp,
          stmtAuthPrivileges,
          new Subject(authenticator.getUserName()),
          inputHierarchyList,
          outputHierarchyList);
    } catch (AuthorizationException e) {
      // Reconstruct the db/table involved (best effort) for the on-failure hooks.
      Database db = null;
      Table tab = null;
      // Not derivable from HivePrivilegeObject here; passed as null to the hook context.
      AccessURI udfURI = null;
      AccessURI partitionURI = null;
      if (outputHObjs != null) {
        for (HivePrivilegeObject obj : outputHObjs) {
          switch (obj.getType()) {
            case DATABASE:
              db = new Database(obj.getObjectName());
              break;
            case TABLE_OR_VIEW:
              db = new Database(obj.getDbname());
              tab = new Table(obj.getObjectName());
              break;
            case PARTITION:
              db = new Database(obj.getDbname());
              tab = new Table(obj.getObjectName());
              // Explicit break: the original fell through into the (empty) URI cases below,
              // which was harmless but an accidental-fall-through trap.
              break;
            case LOCAL_URI:
            case DFS_URI:
              break;
          }
        }
      }
      SentryOnFailureHookContext hookCtx =
          new SentryOnFailureHookContextImpl(
              context.getCommandString(),
              null,
              null,
              hiveOp,
              db,
              tab,
              udfURI,
              partitionURI,
              authenticator.getUserName(),
              context.getIpAddress(),
              e,
              authzConf);
      SentryAuthorizerUtil.executeOnFailureHooks(hookCtx, authzConf);
      // Build "perm1;perm2;..." with a StringBuilder rather than String concatenation in a loop.
      StringBuilder permsBuilder = new StringBuilder();
      for (String perm : hiveAuthzBinding.getLastQueryPrivilegeErrors()) {
        permsBuilder.append(perm).append(';');
      }
      String permsRequired = permsBuilder.toString();
      SessionState.get().getConf().set(HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS, permsRequired);
      String msg =
          HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE
              + "\n Required privileges for this query: "
              + permsRequired;
      throw new HiveAccessControlException(msg, e);
    } catch (Exception e) {
      throw new HiveAuthzPluginException(e.getClass() + ": " + e.getMessage(), e);
    } finally {
      if (hiveAuthzBinding != null) {
        hiveAuthzBinding.close();
      }
    }

    // Mock compilation mode: abort every query after authorization, for testing.
    if ("true"
        .equalsIgnoreCase(
            SessionState.get().getConf().get(HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION))) {
      throw new HiveAccessControlException(
          HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR
              + " Mock query compilation aborted. Set "
              + HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION
              + " to 'false' for normal query processing");
    }
  }