Example #1
  @Override
  public void start(CoprocessorEnvironment env) {
    this.env = (RegionCoprocessorEnvironment) env;
    random = new SecureRandom();
    conf = env.getConfiguration();
    baseStagingDir = SecureBulkLoadUtil.getBaseStagingDir(conf);
    this.userProvider = UserProvider.instantiate(conf);

    try {
      fs = FileSystem.get(conf);
      fs.mkdirs(baseStagingDir, PERM_HIDDEN);
      fs.setPermission(baseStagingDir, PERM_HIDDEN);
      // no sticky bit in hadoop-1.0, making directory nonempty so it never gets erased
      fs.mkdirs(new Path(baseStagingDir, "DONOTERASE"), PERM_HIDDEN);
      FileStatus status = fs.getFileStatus(baseStagingDir);
      if (status == null) {
        throw new IllegalStateException("Failed to create staging directory");
      }
      if (!status.getPermission().equals(PERM_HIDDEN)) {
        throw new IllegalStateException(
            "Directory already exists but permissions aren't set to '-rwx--x--x' ");
      }
    } catch (IOException e) {
      throw new IllegalStateException("Failed to get FileSystem instance", e);
    }
  }
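For reference, a minimal sketch of the fields and constants the start() method above relies on; the names mirror the snippet, and the 0711 mode matches the '-rwx--x--x' permission cited in the error message, but the exact declarations are assumptions rather than the original source:

  // Illustrative declarations only; the real coprocessor defines these elsewhere.
  private static final FsPermission PERM_HIDDEN = new FsPermission((short) 0711); // -rwx--x--x
  private RegionCoprocessorEnvironment env;
  private SecureRandom random;
  private Configuration conf;
  private Path baseStagingDir;
  private UserProvider userProvider;
  private FileSystem fs;
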
Example #2
  // Builds a non-Kerberos HBase Connection: the user named by the KRB_USER
  // environment variable is wrapped as a remote UGI and passed to ConnectionFactory.
  private Connection getUnsecuredHBaseClient(Configuration hbaseConf)
      throws InterruptedException, URISyntaxException, LoginException, IOException {
    SystemEnvironment systemEnvironment = new SystemEnvironment();
    Configuration conf = HBaseConfiguration.create(hbaseConf);
    User user =
        UserProvider.instantiate(hbaseConf)
            .create(
                UserGroupInformation.createRemoteUser(
                    systemEnvironment.getVariable(SystemEnvironment.KRB_USER)));
    return ConnectionFactory.createConnection(conf, user);
  }
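A hypothetical caller could use the returned Connection like this; the table name, row key, and surrounding try-with-resources are illustrative and not part of the original source:

  // Hypothetical usage sketch: fetch one row through the unsecured connection.
  try (Connection connection = getUnsecuredHBaseClient(hbaseConf);
       Table table = connection.getTable(TableName.valueOf("example_table"))) {
    Result result = table.get(new Get(Bytes.toBytes("row-key")));
    // ... read cells from the Result here ...
  }
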
Example #3
  public void startServletContainer(Configuration conf) throws Exception {
    if (server != null) {
      LOG.error("ServletContainer already running");
      return;
    }

    // Inject the conf for the test by being first to make singleton
    RESTServlet.getInstance(conf, UserProvider.instantiate(conf));

    // set up the Jersey servlet container for Jetty
    ResourceConfig app =
        new ResourceConfig()
            .packages("org.apache.hadoop.hbase.rest")
            .register(Jackson1Feature.class);
    ServletHolder sh = new ServletHolder(new ServletContainer(app));

    // set up Jetty and run the embedded server
    server = new Server(0);
    LOG.info("configured " + ServletContainer.class.getName());

    HttpConfiguration httpConfig = new HttpConfiguration();
    httpConfig.setSendDateHeader(false);
    httpConfig.setSendServerVersion(false);
    ServerConnector serverConnector =
        new ServerConnector(server, new HttpConnectionFactory(httpConfig));
    serverConnector.setPort(testServletPort);

    server.addConnector(serverConnector);

    // set up context
    ServletContextHandler ctxHandler =
        new ServletContextHandler(server, "/", ServletContextHandler.SESSIONS);
    ctxHandler.addServlet(sh, "/*");
    // Load filters specified from configuration.
    String[] filterClasses =
        conf.getStrings(Constants.FILTER_CLASSES, ArrayUtils.EMPTY_STRING_ARRAY);
    for (String filter : filterClasses) {
      filter = filter.trim();
      ctxHandler.addFilter(filter, "/*", EnumSet.of(DispatcherType.REQUEST));
    }
    LOG.info("Loaded filter classes :" + filterClasses);

    conf.set(RESTServer.REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY, ".*");
    RESTServer.addCSRFFilter(ctxHandler, conf);

    HttpServerUtil.constrainHttpMethods(ctxHandler);

    // start the server
    server.start();
    // get the port
    testServletPort = ((ServerConnector) server.getConnectors()[0]).getLocalPort();

    LOG.info("started " + server.getClass().getName() + " on port " + testServletPort);
  }
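Once started, a test could reach the embedded REST server on the ephemeral port it picked; a minimal sketch using java.net.HttpURLConnection (the /version/cluster path and the expected status are illustrative):

  // Hypothetical smoke test against the embedded REST server started above.
  URL url = new URL("http://localhost:" + testServletPort + "/version/cluster");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  conn.setRequestMethod("GET");
  int status = conn.getResponseCode();  // expect HTTP 200 once the container is up
  conn.disconnect();
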
Example #4
  // Performs a Kerberos login with the credentials taken from the KRB_USER and
  // KRB_PASSWORD environment variables, then opens a Connection as that Subject.
  private Connection getSecuredHBaseClient(Configuration hbaseConf)
      throws InterruptedException, URISyntaxException, LoginException, IOException {
    LOGGER.info("Trying kerberos authentication");
    KrbLoginManager loginManager =
        KrbLoginManagerFactory.getInstance()
            .getKrbLoginManagerInstance(
                kerberosHbaseProperties.getKdc(), kerberosHbaseProperties.getRealm());

    SystemEnvironment systemEnvironment = new SystemEnvironment();
    Subject subject =
        loginManager.loginWithCredentials(
            systemEnvironment.getVariable(SystemEnvironment.KRB_USER),
            systemEnvironment.getVariable(SystemEnvironment.KRB_PASSWORD).toCharArray());
    loginManager.loginInHadoop(subject, hbaseConf);
    Configuration conf = HBaseConfiguration.create(hbaseConf);
    User user =
        UserProvider.instantiate(conf).create(UserGroupInformation.getUGIFromSubject(subject));
    return ConnectionFactory.createConnection(conf, user);
  }
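The secured and unsecured helpers are typically chosen based on whether HBase security is enabled; a minimal sketch of such a selector (this wrapper method is an assumption, not part of the original source):

  // Hypothetical selector between the Kerberos and plain connection paths.
  private Connection getHBaseClient(Configuration hbaseConf)
      throws InterruptedException, URISyntaxException, LoginException, IOException {
    if (User.isHBaseSecurityEnabled(hbaseConf)) {
      return getSecuredHBaseClient(hbaseConf);    // Kerberos login via KrbLoginManager
    }
    return getUnsecuredHBaseClient(hbaseConf);    // remote-user based connection
  }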