Example #1
  @Override
  public void start(CoprocessorEnvironment env) {
    this.env = (RegionCoprocessorEnvironment) env;
    random = new SecureRandom();
    conf = env.getConfiguration();
    baseStagingDir = SecureBulkLoadUtil.getBaseStagingDir(conf);
    this.userProvider = UserProvider.instantiate(conf);

    try {
      fs = FileSystem.get(conf);
      fs.mkdirs(baseStagingDir, PERM_HIDDEN);
      fs.setPermission(baseStagingDir, PERM_HIDDEN);
      // no sticky bit in hadoop-1.0, so make the directory non-empty to keep it from being erased
      fs.mkdirs(new Path(baseStagingDir, "DONOTERASE"), PERM_HIDDEN);
      FileStatus status = fs.getFileStatus(baseStagingDir);
      if (status == null) {
        throw new IllegalStateException("Failed to create staging directory");
      }
      if (!status.getPermission().equals(PERM_HIDDEN)) {
        throw new IllegalStateException(
            "Directory already exists but permissions aren't set to '-rwx--x--x' ");
      }
    } catch (IOException e) {
      throw new IllegalStateException("Failed to get FileSystem instance", e);
    }
  }
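For context, UserProvider.instantiate(conf) is the standard HBase entry point for obtaining User instances, as the coprocessor above does in start(). A minimal standalone sketch of that pattern (the class name UserProviderSketch is made up for illustration):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;

public class UserProviderSketch {
  public static void main(String[] args) throws IOException {
    // Load hbase-default.xml / hbase-site.xml from the classpath.
    Configuration conf = HBaseConfiguration.create();
    // Instantiate the configured UserProvider implementation.
    UserProvider provider = UserProvider.instantiate(conf);
    // The current login user, backed by a Hadoop UserGroupInformation.
    User current = provider.getCurrent();
    System.out.println("Running as " + current.getShortName()
        + ", Hadoop security enabled: " + provider.isHadoopSecurityEnabled());
  }
}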
Example #2
  private Connection getUnsecuredHBaseClient(Configuration hbaseConf)
      throws InterruptedException, URISyntaxException, LoginException, IOException {
    SystemEnvironment systemEnvironment = new SystemEnvironment();
    Configuration conf = HBaseConfiguration.create(hbaseConf);
    User user =
        UserProvider.instantiate(hbaseConf)
            .create(
                UserGroupInformation.createRemoteUser(
                    systemEnvironment.getVariable(SystemEnvironment.KRB_USER)));
    return ConnectionFactory.createConnection(conf, user);
  }
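A possible call site for the helper above, hedged: the listTables() method below is hypothetical and assumes it lives in the same class as getUnsecuredHBaseClient; it mainly illustrates that the returned Connection should be closed by the caller.

  // Hypothetical caller: open an unsecured connection, list table names,
  // and release everything via try-with-resources.
  void listTables() throws Exception {
    Configuration hbaseConf = HBaseConfiguration.create();
    try (Connection connection = getUnsecuredHBaseClient(hbaseConf);
         Admin admin = connection.getAdmin()) {
      for (TableName name : admin.listTableNames()) {
        System.out.println(name.getNameAsString());
      }
    }
  }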
Example #3
  public void startServletContainer(Configuration conf) throws Exception {
    if (server != null) {
      LOG.error("ServletContainer already running");
      return;
    }

    // Inject the conf for the test by being the first caller to create the singleton
    RESTServlet.getInstance(conf, UserProvider.instantiate(conf));

    // set up the Jersey servlet container for Jetty
    ResourceConfig app =
        new ResourceConfig()
            .packages("org.apache.hadoop.hbase.rest")
            .register(Jackson1Feature.class);
    ServletHolder sh = new ServletHolder(new ServletContainer(app));

    // set up Jetty and run the embedded server
    server = new Server(0);
    LOG.info("configured " + ServletContainer.class.getName());

    HttpConfiguration httpConfig = new HttpConfiguration();
    httpConfig.setSendDateHeader(false);
    httpConfig.setSendServerVersion(false);
    ServerConnector serverConnector =
        new ServerConnector(server, new HttpConnectionFactory(httpConfig));
    serverConnector.setPort(testServletPort);

    server.addConnector(serverConnector);

    // set up context
    ServletContextHandler ctxHandler =
        new ServletContextHandler(server, "/", ServletContextHandler.SESSIONS);
    ctxHandler.addServlet(sh, "/*");
    // Load filters specified from configuration.
    String[] filterClasses =
        conf.getStrings(Constants.FILTER_CLASSES, ArrayUtils.EMPTY_STRING_ARRAY);
    for (String filter : filterClasses) {
      filter = filter.trim();
      ctxHandler.addFilter(filter, "/*", EnumSet.of(DispatcherType.REQUEST));
    }
    LOG.info("Loaded filter classes :" + filterClasses);

    conf.set(RESTServer.REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY, ".*");
    RESTServer.addCSRFFilter(ctxHandler, conf);

    HttpServerUtil.constrainHttpMethods(ctxHandler);

    // start the server
    server.start();
    // get the port
    testServletPort = ((ServerConnector) server.getConnectors()[0]).getLocalPort();

    LOG.info("started " + server.getClass().getName() + " on port " + testServletPort);
  }
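Typical test usage of this method, sketched under the assumption that it belongs to HBase's HBaseRESTTestingUtility (accessor names such as getServletPort() may differ between versions):

  // Hedged sketch: start the REST container once for the test, read back the
  // ephemeral port Jetty bound to, and stop it on teardown.
  void restRoundTrip(Configuration conf) throws Exception {
    HBaseRESTTestingUtility restUtil = new HBaseRESTTestingUtility();
    restUtil.startServletContainer(conf);
    int port = restUtil.getServletPort();
    // ... issue HTTP requests against http://localhost:<port>/ ...
    restUtil.shutdownServletContainer();
  }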
Example #4
  /**
   * Constructor with existing configuration
   *
   * @param conf existing configuration
   * @param userProvider the login user provider
   * @throws IOException if the current login user cannot be obtained from the provider
   */
  RESTServlet(final Configuration conf, final UserProvider userProvider) throws IOException {
    this.realUser = userProvider.getCurrent().getUGI();
    this.conf = conf;

    int cleanInterval = conf.getInt(CLEANUP_INTERVAL, 10 * 1000);
    int maxIdleTime = conf.getInt(MAX_IDLETIME, 10 * 60 * 1000);
    connectionCache = new ConnectionCache(conf, userProvider, cleanInterval, maxIdleTime);
    if (supportsProxyuser()) {
      ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
    }
  }
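This constructor is not called directly; Example #3 goes through the singleton accessor RESTServlet.getInstance(conf, userProvider). A simplified sketch of that accessor (condensed from HBase's RESTServlet, not a verbatim copy):

  // First caller wires in the configuration; later callers get the same
  // instance, which is what lets the test in Example #3 inject its conf.
  private static RESTServlet INSTANCE;

  public static synchronized RESTServlet getInstance(Configuration conf,
      UserProvider userProvider) throws IOException {
    if (INSTANCE == null) {
      INSTANCE = new RESTServlet(conf, userProvider);
    }
    return INSTANCE;
  }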
Example #5
  private User getActiveUser() {
    User user = RequestContext.getRequestUser();
    if (!RequestContext.isInRequestContext()) {
      return null;
    }

    // this is for testing
    if (userProvider.isHadoopSecurityEnabled()
        && "simple".equalsIgnoreCase(conf.get(User.HBASE_SECURITY_CONF_KEY))) {
      return User.createUserForTesting(conf, user.getShortName(), new String[] {});
    }

    return user;
  }
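User.createUserForTesting, used above, fabricates a login for unit tests; a hedged sketch of how such a test user is typically exercised (the user name "alice" and group "testers" are made up):

  // Hedged sketch: create a throwaway test user and run an action as it.
  void runAsTestUser(Configuration conf) throws Exception {
    User testUser = User.createUserForTesting(conf, "alice", new String[] { "testers" });
    testUser.runAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        // code here executes as "alice"
        return null;
      }
    });
  }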
Example #6
  private Connection getSecuredHBaseClient(Configuration hbaseConf)
      throws InterruptedException, URISyntaxException, LoginException, IOException {
    LOGGER.info("Trying kerberos authentication");
    KrbLoginManager loginManager =
        KrbLoginManagerFactory.getInstance()
            .getKrbLoginManagerInstance(
                kerberosHbaseProperties.getKdc(), kerberosHbaseProperties.getRealm());

    SystemEnvironment systemEnvironment = new SystemEnvironment();
    Subject subject =
        loginManager.loginWithCredentials(
            systemEnvironment.getVariable(SystemEnvironment.KRB_USER),
            systemEnvironment.getVariable(SystemEnvironment.KRB_PASSWORD).toCharArray());
    loginManager.loginInHadoop(subject, hbaseConf);
    Configuration conf = HBaseConfiguration.create(hbaseConf);
    User user =
        UserProvider.instantiate(conf).create(UserGroupInformation.getUGIFromSubject(subject));
    return ConnectionFactory.createConnection(conf, user);
  }
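The secured and unsecured helpers are usually selected at runtime; a hypothetical dispatcher (the configuration check below is an assumption for illustration, not part of the original class):

  // Hypothetical dispatcher: pick Kerberos or simple authentication based on
  // the standard hbase.security.authentication property.
  Connection getHBaseClient(Configuration hbaseConf) throws Exception {
    if ("kerberos".equalsIgnoreCase(hbaseConf.get("hbase.security.authentication"))) {
      return getSecuredHBaseClient(hbaseConf);
    }
    return getUnsecuredHBaseClient(hbaseConf);
  }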
Example #7
  @Override
  public void secureBulkLoadHFiles(
      RpcController controller,
      SecureBulkLoadHFilesRequest request,
      RpcCallback<SecureBulkLoadHFilesResponse> done) {
    final List<Pair<byte[], String>> familyPaths = new ArrayList<Pair<byte[], String>>();
    for (ClientProtos.BulkLoadHFileRequest.FamilyPath el : request.getFamilyPathList()) {
      familyPaths.add(new Pair<byte[], String>(el.getFamily().toByteArray(), el.getPath()));
    }

    Token userToken = null;
    if (userProvider.isHadoopSecurityEnabled()) {
      userToken =
          new Token(
              request.getFsToken().getIdentifier().toByteArray(),
              request.getFsToken().getPassword().toByteArray(),
              new Text(request.getFsToken().getKind()),
              new Text(request.getFsToken().getService()));
    }
    final String bulkToken = request.getBulkToken();
    User user = getActiveUser();
    final UserGroupInformation ugi = user.getUGI();
    if (userToken != null) {
      ugi.addToken(userToken);
    } else if (userProvider.isHadoopSecurityEnabled()) {
      // a null token is only allowed to pass through in "simple" security mode
      // (Hadoop security disabled), which mini cluster tests rely on; with
      // security enabled a missing token is an error
      ResponseConverter.setControllerException(
          controller, new DoNotRetryIOException("User token cannot be null"));
      done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build());
      return;
    }

    HRegion region = env.getRegion();
    boolean bypass = false;
    if (region.getCoprocessorHost() != null) {
      try {
        bypass = region.getCoprocessorHost().preBulkLoadHFile(familyPaths);
      } catch (IOException e) {
        ResponseConverter.setControllerException(controller, e);
        done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build());
        return;
      }
    }
    boolean loaded = false;
    if (!bypass) {
      // Get the target fs (HBase region server fs) delegation token
      // Since we have checked the permission via 'preBulkLoadHFile', now let's give
      // the 'request user' necessary token to operate on the target fs.
      // After this point the 'doAs' user will hold two tokens, one for the source fs
      // ('request user'), another for the target fs (HBase region server principal).
      if (userProvider.isHadoopSecurityEnabled()) {
        FsDelegationToken targetfsDelegationToken = new FsDelegationToken(userProvider, "renewer");
        try {
          targetfsDelegationToken.acquireDelegationToken(fs);
        } catch (IOException e) {
          ResponseConverter.setControllerException(controller, e);
          done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build());
          return;
        }
        Token<?> targetFsToken = targetfsDelegationToken.getUserToken();
        if (targetFsToken != null
            && (userToken == null || !targetFsToken.getService().equals(userToken.getService()))) {
          ugi.addToken(targetFsToken);
        }
      }

      loaded =
          ugi.doAs(
              new PrivilegedAction<Boolean>() {
                @Override
                public Boolean run() {
                  FileSystem fs = null;
                  try {
                    Configuration conf = env.getConfiguration();
                    fs = FileSystem.get(conf);
                    for (Pair<byte[], String> el : familyPaths) {
                      Path p = new Path(el.getSecond());
                      Path stageFamily = new Path(bulkToken, Bytes.toString(el.getFirst()));
                      if (!fs.exists(stageFamily)) {
                        fs.mkdirs(stageFamily);
                        fs.setPermission(stageFamily, PERM_ALL_ACCESS);
                      }
                    }
                    // We call bulkLoadHFiles as the requesting user so the
                    // files are accessible to it prior to staging
                    return env.getRegion()
                        .bulkLoadHFiles(
                            familyPaths, true, new SecureBulkLoadListener(fs, bulkToken, conf));
                  } catch (Exception e) {
                    LOG.error("Failed to complete bulk load", e);
                  }
                  return false;
                }
              });
    }
    if (region.getCoprocessorHost() != null) {
      try {
        loaded = region.getCoprocessorHost().postBulkLoadHFile(familyPaths, loaded);
      } catch (IOException e) {
        ResponseConverter.setControllerException(controller, e);
        done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build());
        return;
      }
    }
    done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(loaded).build());
  }
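For context, this endpoint is driven from the client by the bulk load tool; a hedged sketch of the programmatic path (LoadIncrementalHFiles is the real HBase client class, but the HFile path and table name below are made up, and the exact doBulkLoad signature varies across HBase versions):

  // Hedged sketch: trigger a bulk load from the client side; with security
  // enabled, the secureBulkLoadHFiles endpoint above runs on the server.
  void bulkLoad() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("mytable"));
         RegionLocator locator = conn.getRegionLocator(table.getName());
         Admin admin = conn.getAdmin()) {
      loader.doBulkLoad(new Path("/tmp/hfiles"), admin, table, locator);
    }
  }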