private void executeOnFailureHooks(
      HiveSemanticAnalyzerHookContext context, HiveOperation hiveOp, AuthorizationException e) {
    SentryOnFailureHookContext hookCtx =
        new SentryOnFailureHookContextImpl(
            context.getCommand(),
            context.getInputs(),
            context.getOutputs(),
            hiveOp,
            currDB,
            currTab,
            udfURI,
            null, // partitionURI (not applicable for failure hooks here)
            context.getUserName(),
            context.getIpAddress(),
            e,
            context.getConf());
    String csHooks =
        authzConf.get(HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim();
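    // csHooks is a comma-separated list of hook class names; getHooks() is expected
    // to instantiate each configured class reflectively.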

    try {
      for (Hook aofh : getHooks(csHooks)) {
        ((SentryOnFailureHook) aofh).run(hookCtx);
      }
    } catch (Exception ex) {
      LOG.error("Error executing hook:", ex);
    }
  }
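
  /**
   * Illustrative sketch, not part of the original class: a minimal on-failure hook of
   * the kind executeOnFailureHooks() runs. Hooks like this are registered through the
   * AUTHZ_ONFAILURE_HOOKS property as a comma-separated list of class names.
   */
  private static final class LoggingOnFailureHook implements SentryOnFailureHook {
    @Override
    public void run(SentryOnFailureHookContext hookContext) throws Exception {
      // Record the denied statement and the user who issued it (assumes the outer
      // class declares a static LOG).
      LOG.warn("Authorization failed for user " + hookContext.getUserName()
          + " on command: " + hookContext.getCommand());
    }
  }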

  private Subject getCurrentSubject(HiveSemanticAnalyzerHookContext context) {
    // Extract the username from the hook context
    return new Subject(context.getUserName());
  }

  /**
   * Converts the input/output entities into authorizables, generates authorizables for cases
   * such as database and metadata operations where the compiler doesn't capture entities, and
   * invokes the Hive binding to validate permissions.
   *
   * @param context
   * @param stmtAuthObject
   * @param stmtOperation
   * @throws AuthorizationException
   */
  private void authorizeWithHiveBindings(
      HiveSemanticAnalyzerHookContext context,
      HiveAuthzPrivileges stmtAuthObject,
      HiveOperation stmtOperation)
      throws AuthorizationException {
    Set<ReadEntity> inputs = context.getInputs();
    Set<WriteEntity> outputs = context.getOutputs();
    List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
    List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
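    // Each inner list is a path from the server root down to the authorizable being
    // accessed (e.g. server -> db -> table -> column); the binding later matches these
    // paths against the user's granted privileges.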

    if (LOG.isDebugEnabled()) {
      LOG.debug("stmtAuthObject.getOperationScope() = " + stmtAuthObject.getOperationScope());
      LOG.debug("context.getInputs() = " + context.getInputs());
      LOG.debug("context.getOutputs() = " + context.getOutputs());
    }

    // Workaround to allow DESCRIBE <table> to be executed with only column-level privileges,
    // while still authorizing DESCRIBE [EXTENDED|FORMATTED] as table-level. This is done by
    // treating DESCRIBE <table> the same as SHOW COLUMNS, which only requires column-level
    // privileges.
    if (isDescTableBasic) {
      stmtAuthObject = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
    }

    switch (stmtAuthObject.getOperationScope()) {
      case SERVER:
        // validate server-level privileges if applicable, e.g. create UDF, register jar, etc.
        List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
        serverHierarchy.add(hiveAuthzBinding.getAuthServer());
        inputHierarchy.add(serverHierarchy);
        break;
      case DATABASE:
        // workaround for database scope statements (create/alter/drop db)
        List<DBModelAuthorizable> dbHierarchy = new ArrayList<DBModelAuthorizable>();
        dbHierarchy.add(hiveAuthzBinding.getAuthServer());
        dbHierarchy.add(currDB);
        inputHierarchy.add(dbHierarchy);
        outputHierarchy.add(dbHierarchy);

        getInputHierarchyFromInputs(inputHierarchy, inputs);
        break;
      case TABLE:
        // workaround for add partitions
        if (partitionURI != null) {
          inputHierarchy.add(ImmutableList.of(hiveAuthzBinding.getAuthServer(), partitionURI));
        }
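        // partitionURI is captured in pre-analyze when ADD PARTITION specifies an
        // explicit LOCATION, so the URI itself is authorized as an input.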

        getInputHierarchyFromInputs(inputHierarchy, inputs);
        for (WriteEntity writeEntity : outputs) {
          if (filterWriteEntity(writeEntity)) {
            continue;
          }
          List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
          entityHierarchy.add(hiveAuthzBinding.getAuthServer());
          entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
          outputHierarchy.add(entityHierarchy);
        }
        // workaround for metadata queries.
        // Capture the table name in pre-analyze and include that in the input entity list
        if (currTab != null) {
          List<DBModelAuthorizable> externalAuthorizableHierarchy =
              new ArrayList<DBModelAuthorizable>();
          externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
          externalAuthorizableHierarchy.add(currDB);
          externalAuthorizableHierarchy.add(currTab);
          inputHierarchy.add(externalAuthorizableHierarchy);
        }

        // workaround for DDL statements
        // Capture the table name in pre-analyze and include that in the output entity list
        if (currOutTab != null) {
          List<DBModelAuthorizable> externalAuthorizableHierarchy =
              new ArrayList<DBModelAuthorizable>();
          externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
          externalAuthorizableHierarchy.add(currOutDB);
          externalAuthorizableHierarchy.add(currOutTab);
          outputHierarchy.add(externalAuthorizableHierarchy);
        }
        break;
      case FUNCTION:
        /* The 'FUNCTION' privilege scope is currently used for
         *  - CREATE TEMP FUNCTION
         *  - DROP TEMP FUNCTION
         */
        if (udfURI != null) {
          List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>();
          udfUriHierarchy.add(hiveAuthzBinding.getAuthServer());
          udfUriHierarchy.add(udfURI);
          inputHierarchy.add(udfUriHierarchy);
          for (WriteEntity writeEntity : outputs) {
            List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
            entityHierarchy.add(hiveAuthzBinding.getAuthServer());
            entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
            outputHierarchy.add(entityHierarchy);
          }
        }
        break;
      case CONNECT:
        /* 'CONNECT' is an implicit privilege scope currently used for
         *  - USE <db>
         *  It is allowed when the user has any privilege on the target database. For backward
         *  compatibility with applications, we allow (optional) implicit connect permission on
         *  the 'default' db.
         */
        List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>();
        connectHierarchy.add(hiveAuthzBinding.getAuthServer());
        // by default allow connect access to default db
        Table currTbl = Table.ALL;
        Column currCol = Column.ALL;
        if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDB.getName())
            && "false"
                .equalsIgnoreCase(
                    authzConf.get(
                        HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
          currDB = Database.ALL;
          currTbl = Table.SOME;
        }
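        // Widening to Database.ALL / Table.SOME relaxes the implicit connect check for
        // the default database (opt out via AUTHZ_RESTRICT_DEFAULT_DB).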

        connectHierarchy.add(currDB);
        connectHierarchy.add(currTbl);
        connectHierarchy.add(currCol);

        inputHierarchy.add(connectHierarchy);
        outputHierarchy.add(connectHierarchy);
        break;
      case COLUMN:
        for (ReadEntity readEntity : inputs) {
          if (readEntity.getAccessedColumns() != null
              && !readEntity.getAccessedColumns().isEmpty()) {
            addColumnHierarchy(inputHierarchy, readEntity);
          } else {
            List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
            entityHierarchy.add(hiveAuthzBinding.getAuthServer());
            entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
            entityHierarchy.add(Column.ALL);
            inputHierarchy.add(entityHierarchy);
          }
        }
        break;
      default:
        throw new AuthorizationException(
            "Unknown operation scope type " + stmtAuthObject.getOperationScope().toString());
    }

    HiveAuthzBinding binding = null;
    try {
      binding = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, context.getUserName());
    } catch (SemanticException e) {
      // Will use the original hiveAuthzBinding
      binding = hiveAuthzBinding;
    }
    // validate permission
    binding.authorize(
        stmtOperation, stmtAuthObject, getCurrentSubject(context), inputHierarchy, outputHierarchy);
  }

  /** Post-analyze hook that invokes the Hive authorization bindings. */
  @Override
  public void postAnalyze(
      HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks)
      throws SemanticException {
    HiveOperation stmtOperation = getCurrentHiveStmtOp();
    HiveAuthzPrivileges stmtAuthObject;

    stmtAuthObject = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(stmtOperation);

    // This must occur above the null check on stmtAuthObject, since GRANT/REVOKE/etc.
    // are not authorized by the binding layer at present.
    Subject subject = getCurrentSubject(context);
    Set<String> subjectGroups = hiveAuthzBinding.getGroups(subject);
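    // GRANT/REVOKE statements compile into SentryGrantRevokeTask, which talks to the
    // Sentry service directly rather than going through the binding call below.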
    for (Task<? extends Serializable> task : rootTasks) {
      if (task instanceof SentryGrantRevokeTask) {
        SentryGrantRevokeTask sentryTask = (SentryGrantRevokeTask) task;
        sentryTask.setHiveAuthzBinding(hiveAuthzBinding);
        sentryTask.setAuthzConf(authzConf);
        sentryTask.setSubject(subject);
        sentryTask.setSubjectGroups(subjectGroups);
        sentryTask.setIpAddress(context.getIpAddress());
        sentryTask.setOperation(stmtOperation);
      }
    }

    try {
      if (stmtAuthObject == null) {
        // We don't handle authorizing this statement
        return;
      }

      /**
       * Replace DDLTask with SentryFilterDDLTask for protection, e.g. so that "SHOW COLUMNS"
       * only lists the columns the user can access. See SENTRY-847.
       */
      for (int i = 0; i < rootTasks.size(); i++) {
        Task<? extends Serializable> task = rootTasks.get(i);
        if (task instanceof DDLTask) {
          SentryFilterDDLTask filterTask =
              new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation);
          filterTask.setWork((DDLWork) task.getWork());
          rootTasks.set(i, filterTask);
        }
      }

      authorizeWithHiveBindings(context, stmtAuthObject, stmtOperation);
    } catch (AuthorizationException e) {
      executeOnFailureHooks(context, stmtOperation, e);
      StringBuilder permsBuilder = new StringBuilder();
      for (String perm : hiveAuthzBinding.getLastQueryPrivilegeErrors()) {
        permsBuilder.append(perm).append(';');
      }
      String permsRequired = permsBuilder.toString();
      SessionState.get().getConf().set(HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS, permsRequired);
      String msgForLog =
          HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE
              + "\n Required privileges for this query: "
              + permsRequired;
      String msgForConsole =
          HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE
              + "\n "
              + e.getMessage()
              + "\n The required privileges: "
              + permsRequired;
      // AuthorizationException is an expected failure rather than an error,
      // so record it at the info level.
      LOG.info(msgForLog);
      throw new SemanticException(msgForConsole, e);
    } finally {
      hiveAuthzBinding.close();
    }

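    // Test hook: with HIVE_SENTRY_MOCK_COMPILATION enabled, compilation is aborted
    // after authorization succeeds so the authorization path can be exercised
    // without actually running the query.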
    if ("true"
        .equalsIgnoreCase(context.getConf().get(HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION))) {
      throw new SemanticException(
          HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR
              + " Mock query compilation aborted. Set "
              + HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION
              + " to 'false' for normal query processing");
    }
  }
  @Override
  protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work)
      throws HiveException {
    // DB operations; none of them are enforced by Hive right now.

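    // Each get*Desc() accessor below returns non-null only when this DDLWork wraps that
    // statement type, so typically only one of the following blocks applies per call.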
    ShowDatabasesDesc showDatabases = work.getShowDatabasesDesc();
    if (showDatabases != null) {
      authorize(
          HiveOperation.SHOWDATABASES.getInputRequiredPrivileges(),
          HiveOperation.SHOWDATABASES.getOutputRequiredPrivileges());
    }

    DropDatabaseDesc dropDb = work.getDropDatabaseDesc();
    if (dropDb != null) {
      Database db = cntxt.getHive().getDatabase(dropDb.getDatabaseName());
      authorize(db, Privilege.DROP);
    }

    DescDatabaseDesc descDb = work.getDescDatabaseDesc();
    if (descDb != null) {
      Database db = cntxt.getHive().getDatabase(descDb.getDatabaseName());
      authorize(db, Privilege.SELECT);
    }

    SwitchDatabaseDesc switchDb = work.getSwitchDatabaseDesc();
    if (switchDb != null) {
      Database db = cntxt.getHive().getDatabase(switchDb.getDatabaseName());
      authorize(db, Privilege.SELECT);
    }

    ShowTablesDesc showTables = work.getShowTblsDesc();
    if (showTables != null) {
      String dbName =
          showTables.getDbName() == null
              ? SessionState.get().getCurrentDatabase()
              : showTables.getDbName();
      authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
    }

    ShowTableStatusDesc showTableStatus = work.getShowTblStatusDesc();
    if (showTableStatus != null) {
      String dbName =
          showTableStatus.getDbName() == null
              ? SessionState.get().getCurrentDatabase()
              : showTableStatus.getDbName();
      authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
    }

    // TODO: add alter database support in HCat

    // Table operations.

    DropTableDesc dropTable = work.getDropTblDesc();
    if (dropTable != null) {
      if (dropTable.getPartSpecs() == null) {
        // DROP TABLE is already enforced by Hive. We only check the table-level location,
        // even if the table is partitioned.
      } else {
        // this is actually an ALTER TABLE DROP PARTITION statement
        for (DropTableDesc.PartSpec partSpec : dropTable.getPartSpecs()) {
          // partitions are not added as write entities for drop partitions in Hive
          Table table =
              hive.getTable(SessionState.get().getCurrentDatabase(), dropTable.getTableName());
          List<Partition> partitions = null;
          try {
            partitions = hive.getPartitionsByFilter(table, partSpec.getPartSpec().getExprString());
          } catch (Exception e) {
            throw new HiveException(e);
          }

          for (Partition part : partitions) {
            authorize(part, Privilege.DROP);
          }
        }
      }
    }

    AlterTableDesc alterTable = work.getAlterTblDesc();
    if (alterTable != null) {
      Table table =
          hive.getTable(SessionState.get().getCurrentDatabase(), alterTable.getOldName(), false);

      Partition part = null;
      if (alterTable.getPartSpec() != null) {
        part = hive.getPartition(table, alterTable.getPartSpec(), false);
      }

      String newLocation = alterTable.getNewLocation();

      /* HCat requires ALTER_DATA privileges for ALTER TABLE LOCATION statements
       * for the old table/partition location and the new location.
       */
      if (alterTable.getOp() == AlterTableDesc.AlterTableTypes.ALTERLOCATION) {
        if (part != null) {
          // authorize the old location, then the new location
          authorize(part, Privilege.ALTER_DATA);
          part.setLocation(newLocation);
          authorize(part, Privilege.ALTER_DATA);
        } else {
          // authorize the old location, then the new location
          authorize(table, Privilege.ALTER_DATA);
          table.getTTable().getSd().setLocation(newLocation);
          authorize(table, Privilege.ALTER_DATA);
        }
      }
      // other alter operations are already supported by Hive
    }

    // We should be careful when authorizing a table based on just the table name.
    // If columns have a separate authorization domain, that must be honored.
    DescTableDesc descTable = work.getDescTblDesc();
    if (descTable != null) {
      String tableName = extractTableName(descTable.getTableName());
      authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
    }

    ShowPartitionsDesc showParts = work.getShowPartsDesc();
    if (showParts != null) {
      String tableName = extractTableName(showParts.getTabName());
      authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
    }
  }