@Test
  public void testLocalUser() throws Exception {
    try {
      // nonsecure default
      Configuration conf = new YarnConfiguration();
      conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
      UserGroupInformation.setConfiguration(conf);
      LinuxContainerExecutor lce = new LinuxContainerExecutor();
      lce.setConf(conf);
      Assert.assertEquals(
          YarnConfiguration.DEFAULT_NM_NONSECURE_MODE_LOCAL_USER, lce.getRunAsUser("foo"));

      // nonsecure custom setting
      conf.set(YarnConfiguration.NM_NONSECURE_MODE_LOCAL_USER_KEY, "bar");
      lce = new LinuxContainerExecutor();
      lce.setConf(conf);
      Assert.assertEquals("bar", lce.getRunAsUser("foo"));

      // secure
      conf = new YarnConfiguration();
      conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
      UserGroupInformation.setConfiguration(conf);
      lce = new LinuxContainerExecutor();
      lce.setConf(conf);
      Assert.assertEquals("foo", lce.getRunAsUser("foo"));
    } finally {
      Configuration conf = new YarnConfiguration();
      conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
      UserGroupInformation.setConfiguration(conf);
    }
  }
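
The test above toggles hadoop.security.authentication through UserGroupInformation.setConfiguration, which mutates process-wide static state, so the finally block restores simple auth for later tests. A minimal sketch of that reset as a reusable JUnit 4 base class (the class and method names are hypothetical; assumes hadoop-common on the classpath):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.After;

public abstract class UgiResetTestBase {
  protected static void setAuthentication(String method) {
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, method);
    UserGroupInformation.setConfiguration(conf);
  }

  // Mirrors the finally block above: every test ends back in simple auth.
  @After
  public void resetUgiToSimpleAuth() {
    setAuthentication("simple");
  }
}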
Example #2
 @Test
 public void testDelegationTokenUrlParam() {
   conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
   UserGroupInformation.setConfiguration(conf);
   String tokenString = "xyzabc";
   String delegationTokenParam = JspHelper.getDelegationTokenUrlParam(tokenString);
   // Security is enabled
   Assert.assertEquals(JspHelper.SET_DELEGATION + "xyzabc", delegationTokenParam);
   conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "simple");
   UserGroupInformation.setConfiguration(conf);
   delegationTokenParam = JspHelper.getDelegationTokenUrlParam(tokenString);
   // Empty string must be returned because security is disabled.
   Assert.assertEquals("", delegationTokenParam);
 }
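
JspHelper.getDelegationTokenUrlParam returns a ready-to-append query fragment (JspHelper.SET_DELEGATION is the "&delegation=" prefix) when security is enabled and an empty string otherwise, so callers can concatenate it unconditionally. A hedged usage sketch; buildFsckUrl is a hypothetical helper, not part of JspHelper:

import org.apache.hadoop.hdfs.server.common.JspHelper;

final class DelegationUrlSketch {
  // Yields "/fsck?path=<path>&delegation=<token>" when secure,
  // and plain "/fsck?path=<path>" when security is off.
  static String buildFsckUrl(String path, String tokenString) {
    return "/fsck?path=" + path + JspHelper.getDelegationTokenUrlParam(tokenString);
  }
}

Example #3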
 @Test(timeout = 20000)
 public void testAppSubmissionWithInvalidDelegationToken() throws Exception {
   Configuration conf = new Configuration();
   conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
   UserGroupInformation.setConfiguration(conf);
   MockRM rm = new MockRM(conf);
   ByteBuffer tokens = ByteBuffer.wrap("BOGUS".getBytes());
   ContainerLaunchContext amContainer =
       ContainerLaunchContext.newInstance(
           new HashMap<String, LocalResource>(),
           new HashMap<String, String>(),
           new ArrayList<String>(),
           new HashMap<String, ByteBuffer>(),
           tokens,
           new HashMap<ApplicationAccessType, String>());
   ApplicationSubmissionContext appSubContext =
       ApplicationSubmissionContext.newInstance(
           ApplicationId.newInstance(1234121, 0),
           "BOGUS",
           "default",
           Priority.UNDEFINED,
           amContainer,
           false,
           true,
           1,
           Resource.newInstance(1024, 1),
           "BOGUS");
   SubmitApplicationRequest request = SubmitApplicationRequest.newInstance(appSubContext);
   try {
     rm.getClientRMService().submitApplication(request);
     fail("Error was excepted.");
   } catch (YarnException e) {
     Assert.assertTrue(e.getMessage().contains("Bad header found in token storage"));
   }
 }
Example #4
 @Before
 public void setupConf() {
   conf = new Configuration();
   conf.setClass(
       "rpc.engine." + StoppedProtocol.class.getName(), StoppedRpcEngine.class, RpcEngine.class);
   UserGroupInformation.setConfiguration(conf);
 }
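Example #5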
  @Test
  public void testNonsecureUsernamePattern() throws Exception {
    try {
      // nonsecure default
      Configuration conf = new YarnConfiguration();
      conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
      UserGroupInformation.setConfiguration(conf);
      LinuxContainerExecutor lce = new LinuxContainerExecutor();
      lce.setConf(conf);
      lce.verifyUsernamePattern("foo");
      try {
        lce.verifyUsernamePattern("foo/x");
        Assert.fail();
      } catch (IllegalArgumentException ex) {
        // NOP
      } catch (Throwable ex) {
        Assert.fail(ex.toString());
      }

      // nonsecure custom setting
      conf.set(YarnConfiguration.NM_NONSECURE_MODE_USER_PATTERN_KEY, "foo");
      lce = new LinuxContainerExecutor();
      lce.setConf(conf);
      lce.verifyUsernamePattern("foo");
      try {
        lce.verifyUsernamePattern("bar");
        Assert.fail();
      } catch (IllegalArgumentException ex) {
        // NOP
      } catch (Throwable ex) {
        Assert.fail(ex.toString());
      }

      // secure, pattern matching does not kick in.
      conf = new YarnConfiguration();
      conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
      UserGroupInformation.setConfiguration(conf);
      lce = new LinuxContainerExecutor();
      lce.setConf(conf);
      lce.verifyUsernamePattern("foo");
      lce.verifyUsernamePattern("foo/w");
    } finally {
      Configuration conf = new YarnConfiguration();
      conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
      UserGroupInformation.setConfiguration(conf);
    }
  }
Example #6
 @Before
 public void setup() {
   Logger rootLogger = LogManager.getRootLogger();
   rootLogger.setLevel(Level.DEBUG);
   ExitUtil.disableSystemExit();
   conf = new YarnConfiguration();
   UserGroupInformation.setConfiguration(conf);
   conf.set(YarnConfiguration.RM_STORE, MemoryRMStateStore.class.getName());
   conf.set(YarnConfiguration.RM_SCHEDULER, FairScheduler.class.getName());
 }
Example #7
 @Override
 public void afterPropertiesSet() throws Exception {
   Assert.notNull(configuration, "Yarn configuration must be set");
   Assert.notNull(protocolClazz, "Rpc protocol class must be set");
   if (UserGroupInformation.isSecurityEnabled()) {
     UserGroupInformation.setConfiguration(configuration);
   }
   address = getRpcAddress(configuration);
   proxy = createProxy();
 }
Example #8
  static {
    setupMockJaasParent();

    Configuration conf = new Configuration();
    conf.set(
        "hadoop.security.auth_to_local",
        "RULE:[2:$1@$0](.*@HADOOP.APACHE.ORG)s/@.*//"
            + "RULE:[1:$1@$0](.*@HADOOP.APACHE.ORG)s/@.*//"
            + "DEFAULT");
    UserGroupInformation.setConfiguration(conf);
  }
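Example #9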
 private void testDelegationTokenWithinDoAs(final Class fileSystemClass, boolean proxyUser)
     throws Exception {
   Configuration conf = new Configuration();
   conf.set("hadoop.security.authentication", "kerberos");
   UserGroupInformation.setConfiguration(conf);
   UserGroupInformation.loginUserFromKeytab("client", "/Users/tucu/tucu.keytab");
   UserGroupInformation ugi = UserGroupInformation.getLoginUser();
   if (proxyUser) {
     ugi = UserGroupInformation.createProxyUser("foo", ugi);
   }
   conf = new Configuration();
   UserGroupInformation.setConfiguration(conf);
   ugi.doAs(
       new PrivilegedExceptionAction<Void>() {
         @Override
         public Void run() throws Exception {
           testDelegationTokenWithFS(fileSystemClass);
           return null;
         }
       });
 }
Example #10
  @Test
  public void testGetNonProxyUgi() throws IOException {
    conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
    ServletContext context = mock(ServletContext.class);
    String realUser = "******";
    String user = "******";
    conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation ugi;
    HttpServletRequest request;

    // have to be auth-ed with remote user
    request = getMockRequest(null, null, null);
    try {
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad request allowed");
    } catch (IOException ioe) {
      Assert.assertEquals(
          "Security enabled but user not authenticated by filter", ioe.getMessage());
    }
    request = getMockRequest(null, realUser, null);
    try {
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad request allowed");
    } catch (IOException ioe) {
      Assert.assertEquals(
          "Security enabled but user not authenticated by filter", ioe.getMessage());
    }

    // ugi for remote user
    request = getMockRequest(realUser, null, null);
    ugi = JspHelper.getUGI(context, request, conf);
    Assert.assertNull(ugi.getRealUser());
    Assert.assertEquals(ugi.getShortUserName(), realUser);
    checkUgiFromAuth(ugi);

    // ugi for remote user = real user
    request = getMockRequest(realUser, realUser, null);
    ugi = JspHelper.getUGI(context, request, conf);
    Assert.assertNull(ugi.getRealUser());
    Assert.assertEquals(ugi.getShortUserName(), realUser);
    checkUgiFromAuth(ugi);

    // ugi for remote user != real user
    request = getMockRequest(realUser, user, null);
    try {
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad request allowed");
    } catch (IOException ioe) {
      Assert.assertEquals(
          "Usernames not matched: name=" + user + " != expected=" + realUser, ioe.getMessage());
    }
  }
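
These JspHelper tests drive three inputs through a mocked request: the filter-authenticated remote user, the user.name query parameter, and the doas query parameter. A hedged reconstruction of the getMockRequest helper they share, inferred from the assertions (assumes Mockito and the HDFS web resource classes):

import javax.servlet.http.HttpServletRequest;
import org.apache.hadoop.hdfs.web.resources.DoAsParam;
import org.apache.hadoop.hdfs.web.resources.UserParam;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

// Inside the test class:
private HttpServletRequest getMockRequest(String remoteUser, String user, String doAs) {
  HttpServletRequest request = mock(HttpServletRequest.class);
  // Principal authenticated by the servlet filter, or null if unauthenticated.
  when(request.getRemoteUser()).thenReturn(remoteUser);
  // Optional user.name and doas query parameters (null when absent).
  when(request.getParameter(UserParam.NAME)).thenReturn(user);
  when(request.getParameter(DoAsParam.NAME)).thenReturn(doAs);
  return request;
}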
Example #11
  public void initialise() throws IOException {
    Configuration conf = HBaseConfiguration.create();
    conf.addResource(new Path(clientConfig));
    conf.addResource(new Path("/etc/hadoop/conf/core-site.xml"));

    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab(user, keytabLocation);

    System.out.println(conf.toString());

    connection = ConnectionFactory.createConnection(conf);
  }
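Example #12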
  @Test
  public void testGetKerberosPrincipalWithSubstitutedHostNonSecure() throws Exception {
    String principal =
        StartupProperties.get().getProperty(FalconAuthenticationFilter.KERBEROS_PRINCIPAL);
    Configuration conf = new Configuration(false);
    conf.set("hadoop.security.authentication", "simple");
    UserGroupInformation.setConfiguration(conf);
    Assert.assertFalse(UserGroupInformation.isSecurityEnabled());

    FalconAuthenticationFilter filter = new FalconAuthenticationFilter();
    Properties properties = filter.getConfiguration(FalconAuthenticationFilter.FALCON_PREFIX, null);
    Assert.assertEquals(properties.get(KerberosAuthenticationHandler.PRINCIPAL), principal);
  }
Example #13
 public HBaseUtils(
     String quorum,
     boolean useKerberos,
     String keyTabUsername,
     String kerberosEnv,
     String keyTabFileLocation,
     int regions)
     throws IOException {
   this.regions = regions;
   conf.set("hbase.zookeeper.quorum", quorum);
   if (useKerberos) {
     conf.set("hadoop.security.authentication", "Kerberos");
     conf.set("hbase.security.authentication", "Kerberos");
     conf.set("hbase.master.kerberos.principal", "hbase/_HOST@" + kerberosEnv + ".YOURDOMAIN.COM");
     conf.set(
         "hbase.regionserver.kerberos.principal",
         "hbase/_HOST@" + kerberosEnv + ".YOURDOMAIN.COM");
     conf.set("hbase.client.keyvalue.maxsize", "-1");
     UserGroupInformation.setConfiguration(conf);
     try {
       UserGroupInformation.loginUserFromKeytab(
           keyTabUsername + "@" + kerberosEnv + ".YOURDOMAIN.COM", keyTabFileLocation);
       valid = true;
     } catch (IOException e) {
       e.printStackTrace();
       valid = false;
     }
     kerberosRefresher.scheduleAtFixedRate(
         () -> {
           try {
             UserGroupInformation ugi = UserGroupInformation.getLoginUser();
             if (ugi == null) {
               Logger.error("KERBEROS GOT LOGGED OUT");
               UserGroupInformation.loginUserFromKeytab(
                   keyTabUsername + "@" + kerberosEnv + ".YOURDOMAIN.COM", keyTabFileLocation);
             } else {
               ugi.checkTGTAndReloginFromKeytab();
             }
           } catch (IOException e) {
             e.printStackTrace();
           }
         },
         KERBEROS_EXPIRATION_HOURS,
         KERBEROS_EXPIRATION_HOURS,
         TimeUnit.HOURS);
   } else {
     valid = true;
     conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/hbase-unsecure");
   }
   connection = ConnectionFactory.createConnection(conf);
 }
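
The scheduled task above guards against TGT expiry. checkTGTAndReloginFromKeytab is cheap and is a no-op unless the ticket is close to expiring, so an alternative is to call it inline before each batch of HBase operations. A hedged sketch; the class and method names are hypothetical:

import java.io.IOException;
import org.apache.hadoop.security.UserGroupInformation;

final class KerberosRelogin {
  private KerberosRelogin() {}

  // Safe to call often: relogin only happens when the TGT is near expiry.
  static void reloginIfNeeded() throws IOException {
    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
    if (ugi != null && ugi.isFromKeytab()) {
      ugi.checkTGTAndReloginFromKeytab();
    }
  }
}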
Example #14
 protected static void initAndStartAppMaster(
     final DragonAppMaster appMaster, final Configuration conf, String jobUserName)
     throws IOException, InterruptedException {
   UserGroupInformation.setConfiguration(conf);
   UserGroupInformation appMasterUgi = UserGroupInformation.createRemoteUser(jobUserName);
   appMasterUgi.doAs(
       new PrivilegedExceptionAction<Object>() {
         @Override
         public Object run() throws Exception {
           appMaster.init(conf);
           appMaster.start();
           return null;
         }
       });
 }
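
Since UserGroupInformation.doAs takes a PrivilegedExceptionAction, the anonymous class above can be a lambda on Java 8+. A hedged equivalent; runAsUser is a hypothetical helper:

import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

final class DoAsSketch {
  private DoAsSketch() {}

  // Runs the action under a remote (non-authenticated) user identity.
  // doAs unwraps IOException and InterruptedException thrown by the action.
  static void runAsUser(String user, PrivilegedExceptionAction<Void> action)
      throws IOException, InterruptedException {
    UserGroupInformation.createRemoteUser(user).doAs(action);
  }
}

// Usage, matching the method above:
// runAsUser(jobUserName, () -> { appMaster.init(conf); appMaster.start(); return null; });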
Example #15
  public int run(final String[] argv) throws IOException, InterruptedException {
    int val = -1;
    final Configuration conf = getConf();
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation ugi = UserGroupInformation.getLoginUser();

    val =
        ugi.doAs(
            new PrivilegedExceptionAction<Integer>() {
              public Integer run() throws Exception {
                return runJob(conf, argv);
              }
            });
    return val;
  }
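Example #16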
  @Test
  public void testGetInfoServer() throws IOException {
    HdfsConfiguration conf = new HdfsConfiguration();
    conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);

    String httpsport = DFSUtil.getInfoServer(null, conf, true);
    assertEquals("0.0.0.0:" + DFS_NAMENODE_HTTPS_PORT_DEFAULT, httpsport);

    String httpport = DFSUtil.getInfoServer(null, conf, false);
    assertEquals("0.0.0.0:" + DFS_NAMENODE_HTTP_PORT_DEFAULT, httpport);

    String httpAddress =
        DFSUtil.getInfoServer(new InetSocketAddress("localhost", 8020), conf, false);
    assertEquals("localhost:" + DFS_NAMENODE_HTTP_PORT_DEFAULT, httpAddress);
  }
Example #17
 public static void initProcessSecurity(Configuration conf)
     throws IOException, BadConfigException {
   log.info("Secure mode with kerberos realm {}", HoyaUtils.getKerberosRealm());
   // this gets UGI to reset its previous world view (i.e simple auth)
   // security
   SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
   UserGroupInformation.setConfiguration(conf);
   UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
   log.debug("Authenticating as " + ugi.toString());
   log.debug("Login user is {}", UserGroupInformation.getLoginUser());
   if (!UserGroupInformation.isSecurityEnabled()) {
     throw new BadConfigException(
         "Although secure mode is enabled,"
             + "the application has already set up its user as an insecure entity %s",
         ugi);
   }
   HoyaUtils.verifyPrincipalSet(conf, YarnConfiguration.RM_PRINCIPAL);
   HoyaUtils.verifyPrincipalSet(conf, DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY);
 }
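Example #18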
 @Before
 public void setUp() throws Exception {
   counter = new AtomicInteger(0);
   conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
   UserGroupInformation.setConfiguration(conf);
   eventQueue = new LinkedBlockingQueue<Event>();
   dispatcher = new AsyncDispatcher(eventQueue);
   Renewer.reset();
   delegationTokenRenewer = createNewDelegationTokenRenewer(conf, counter);
   delegationTokenRenewer.init(conf);
   RMContext mockContext = mock(RMContext.class);
   ClientRMService mockClientRMService = mock(ClientRMService.class);
   when(mockContext.getDelegationTokenRenewer()).thenReturn(delegationTokenRenewer);
   when(mockContext.getDispatcher()).thenReturn(dispatcher);
   when(mockContext.getClientRMService()).thenReturn(mockClientRMService);
   InetSocketAddress sockAddr = InetSocketAddress.createUnresolved("localhost", 1234);
   when(mockClientRMService.getBindAddress()).thenReturn(sockAddr);
   delegationTokenRenewer.setRMContext(mockContext);
   delegationTokenRenewer.start();
 }
Example #19
File: NameNode.java  Project: imace/hops
  /**
   * Initialize name-node.
   *
   * @param conf the configuration
   */
  protected void initialize(Configuration conf) throws IOException {
    UserGroupInformation.setConfiguration(conf);
    loginAsNameNodeUser(conf);

    HdfsStorageFactory.setConfiguration(conf);

    NameNode.initMetrics(conf, this.getRole());
    loadNamesystem(conf);

    rpcServer = createRpcServer(conf);

    try {
      validateConfigurationSettings(conf);
    } catch (IOException e) {
      LOG.fatal(e.toString());
      throw e;
    }

    startCommonServices(conf);
  }
Example #20
  public int run(final String[] argv) throws IOException, InterruptedException {
    int val = -1;
    final Configuration conf = getConf();
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation ugi = UserGroupInformation.getLoginUser();

    val =
        ugi.doAs(
            new PrivilegedExceptionAction<Integer>() {
              public Integer run() throws Exception {
                return runJob(conf, argv);
              }
            });

    // print the gridmix summary if the run was successful
    if (val == 0) {
      // print the run summary
      System.out.print("\n\n");
      System.out.println(summarizer.toString());
    }

    return val;
  }
Example #21
  @Test
  public void testGetUgi() throws IOException {
    conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
    HttpServletRequest request = mock(HttpServletRequest.class);
    ServletContext context = mock(ServletContext.class);
    String user = "******";
    Text userText = new Text(user);
    DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(userText, userText, null);
    Token<DelegationTokenIdentifier> token =
        new Token<DelegationTokenIdentifier>(dtId, new DummySecretManager(0, 0, 0, 0));
    String tokenString = token.encodeToUrlString();
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(tokenString);
    when(request.getRemoteUser()).thenReturn(user);

    // Test attribute in the url to be used as service in the token.
    when(request.getParameter(JspHelper.NAMENODE_ADDRESS)).thenReturn("1.1.1.1:1111");

    conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);

    verifyServiceInToken(context, request, "1.1.1.1:1111");

    // Test attribute name.node.address
    // Set the nnaddr url parameter to null.
    when(request.getParameter(JspHelper.NAMENODE_ADDRESS)).thenReturn(null);
    InetSocketAddress addr = new InetSocketAddress("localhost", 2222);
    when(context.getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY)).thenReturn(addr);
    verifyServiceInToken(context, request, addr.getAddress().getHostAddress() + ":2222");

    // Test service already set in the token
    token.setService(new Text("3.3.3.3:3333"));
    tokenString = token.encodeToUrlString();
    // Set the name.node.address attribute in Servlet context to null
    when(context.getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY)).thenReturn(null);
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(tokenString);
    verifyServiceInToken(context, request, "3.3.3.3:3333");
  }
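Example #22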
  @Test
  public void testGetKerberosPrincipalWithSubstitutedHostSecure() throws Exception {
    String principal =
        StartupProperties.get().getProperty(FalconAuthenticationFilter.KERBEROS_PRINCIPAL);

    String expectedPrincipal =
        "falcon/" + SecurityUtil.getLocalHostName().toLowerCase() + "@Example.com";
    try {
      Configuration conf = new Configuration(false);
      conf.set("hadoop.security.authentication", "kerberos");
      UserGroupInformation.setConfiguration(conf);
      Assert.assertTrue(UserGroupInformation.isSecurityEnabled());

      StartupProperties.get()
          .setProperty(FalconAuthenticationFilter.KERBEROS_PRINCIPAL, "falcon/_HOST@Example.com");
      FalconAuthenticationFilter filter = new FalconAuthenticationFilter();
      Properties properties =
          filter.getConfiguration(FalconAuthenticationFilter.FALCON_PREFIX, null);
      Assert.assertEquals(
          properties.get(KerberosAuthenticationHandler.PRINCIPAL), expectedPrincipal);
    } finally {
      StartupProperties.get().setProperty(FalconAuthenticationFilter.KERBEROS_PRINCIPAL, principal);
    }
  }
Example #23
  @Test
  public void testGetUgiFromToken() throws IOException {
    conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
    ServletContext context = mock(ServletContext.class);
    String realUser = "******";
    String user = "******";
    conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation ugi;
    HttpServletRequest request;

    Text ownerText = new Text(user);
    DelegationTokenIdentifier dtId =
        new DelegationTokenIdentifier(ownerText, ownerText, new Text(realUser));
    Token<DelegationTokenIdentifier> token =
        new Token<DelegationTokenIdentifier>(dtId, new DummySecretManager(0, 0, 0, 0));
    String tokenString = token.encodeToUrlString();

    // token with no auth-ed user
    request = getMockRequest(null, null, null);
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(tokenString);
    ugi = JspHelper.getUGI(context, request, conf);
    Assert.assertNotNull(ugi.getRealUser());
    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
    Assert.assertEquals(ugi.getShortUserName(), user);
    checkUgiFromToken(ugi);

    // token with auth-ed user
    request = getMockRequest(realUser, null, null);
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(tokenString);
    ugi = JspHelper.getUGI(context, request, conf);
    Assert.assertNotNull(ugi.getRealUser());
    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
    Assert.assertEquals(ugi.getShortUserName(), user);
    checkUgiFromToken(ugi);

    // completely different user, token trumps auth
    request = getMockRequest("rogue", null, null);
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(tokenString);
    ugi = JspHelper.getUGI(context, request, conf);
    Assert.assertNotNull(ugi.getRealUser());
    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
    Assert.assertEquals(ugi.getShortUserName(), user);
    checkUgiFromToken(ugi);

    // expected case
    request = getMockRequest(null, user, null);
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(tokenString);
    ugi = JspHelper.getUGI(context, request, conf);
    Assert.assertNotNull(ugi.getRealUser());
    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
    Assert.assertEquals(ugi.getShortUserName(), user);
    checkUgiFromToken(ugi);

    // can't proxy with a token!
    request = getMockRequest(null, null, "rogue");
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(tokenString);
    try {
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad request allowed");
    } catch (IOException ioe) {
      Assert.assertEquals(
          "Usernames not matched: name=rogue != expected=" + user, ioe.getMessage());
    }

    // can't proxy with a token!
    request = getMockRequest(null, user, "rogue");
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(tokenString);
    try {
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad request allowed");
    } catch (IOException ioe) {
      Assert.assertEquals(
          "Usernames not matched: name=rogue != expected=" + user, ioe.getMessage());
    }
  }
Example #24
  @Test
  public void testErrorMsgForInsecureClient() throws IOException {
    Configuration serverConf = new Configuration(conf);
    SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, serverConf);
    UserGroupInformation.setConfiguration(serverConf);

    final Server server =
        new RPC.Builder(serverConf)
            .setProtocol(TestProtocol.class)
            .setInstance(new TestImpl())
            .setBindAddress(ADDRESS)
            .setPort(0)
            .setNumHandlers(5)
            .setVerbose(true)
            .build();
    server.start();

    UserGroupInformation.setConfiguration(conf);
    boolean succeeded = false;
    final InetSocketAddress addr = NetUtils.getConnectAddress(server);
    TestProtocol proxy = null;
    try {
      proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf);
      proxy.echo("");
    } catch (RemoteException e) {
      LOG.info("LOGGING MESSAGE: " + e.getLocalizedMessage());
      assertTrue(e.unwrapRemoteException() instanceof AccessControlException);
      succeeded = true;
    } finally {
      server.stop();
      if (proxy != null) {
        RPC.stopProxy(proxy);
      }
    }
    assertTrue(succeeded);

    conf.setInt(CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_KEY, 2);

    UserGroupInformation.setConfiguration(serverConf);
    final Server multiServer =
        new RPC.Builder(serverConf)
            .setProtocol(TestProtocol.class)
            .setInstance(new TestImpl())
            .setBindAddress(ADDRESS)
            .setPort(0)
            .setNumHandlers(5)
            .setVerbose(true)
            .build();
    multiServer.start();
    succeeded = false;
    final InetSocketAddress mulitServerAddr = NetUtils.getConnectAddress(multiServer);
    proxy = null;
    try {
      UserGroupInformation.setConfiguration(conf);
      proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, mulitServerAddr, conf);
      proxy.echo("");
    } catch (RemoteException e) {
      LOG.info("LOGGING MESSAGE: " + e.getLocalizedMessage());
      assertTrue(e.unwrapRemoteException() instanceof AccessControlException);
      succeeded = true;
    } finally {
      multiServer.stop();
      if (proxy != null) {
        RPC.stopProxy(proxy);
      }
    }
    assertTrue(succeeded);
  }
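
The test flips the global UGI configuration back and forth between serverConf (Kerberos) and conf (simple) because client and server share one JVM. A hedged helper capturing that swap-and-restore pattern (names are hypothetical; UGI configuration is process-global, so this is not thread-safe):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

final class UgiConfSwap {
  interface ThrowingRunnable<E extends Exception> {
    void run() throws E;
  }

  // Applies conf for the duration of the action, then restores the previous view.
  static <E extends Exception> void withUgiConf(
      Configuration conf, Configuration restore, ThrowingRunnable<E> action) throws E {
    UserGroupInformation.setConfiguration(conf);
    try {
      action.run();
    } finally {
      UserGroupInformation.setConfiguration(restore);
    }
  }
}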
Example #25
  @Test
  public void testGetProxyUgi() throws IOException {
    conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
    ServletContext context = mock(ServletContext.class);
    String realUser = "******";
    String user = "******";
    conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");

    conf.set(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(realUser), "*");
    conf.set(DefaultImpersonationProvider.getProxySuperuserIpConfKey(realUser), "*");
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation ugi;
    HttpServletRequest request;

    // have to be auth-ed with remote user
    request = getMockRequest(null, null, user);
    try {
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad request allowed");
    } catch (IOException ioe) {
      Assert.assertEquals(
          "Security enabled but user not authenticated by filter", ioe.getMessage());
    }
    request = getMockRequest(null, realUser, user);
    try {
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad request allowed");
    } catch (IOException ioe) {
      Assert.assertEquals(
          "Security enabled but user not authenticated by filter", ioe.getMessage());
    }

    // proxy ugi for user via remote user
    request = getMockRequest(realUser, null, user);
    ugi = JspHelper.getUGI(context, request, conf);
    Assert.assertNotNull(ugi.getRealUser());
    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
    Assert.assertEquals(ugi.getShortUserName(), user);
    checkUgiFromAuth(ugi);

    // proxy ugi for user via remote user = real user
    request = getMockRequest(realUser, realUser, user);
    ugi = JspHelper.getUGI(context, request, conf);
    Assert.assertNotNull(ugi.getRealUser());
    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
    Assert.assertEquals(ugi.getShortUserName(), user);
    checkUgiFromAuth(ugi);

    // proxy ugi for user via remote user != real user
    request = getMockRequest(realUser, user, user);
    try {
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad request allowed");
    } catch (IOException ioe) {
      Assert.assertEquals(
          "Usernames not matched: name=" + user + " != expected=" + realUser, ioe.getMessage());
    }

    // try to get a proxy user with an unauthorized user
    try {
      request = getMockRequest(user, null, realUser);
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad proxy request allowed");
    } catch (AuthorizationException ae) {
      Assert.assertEquals(
          "User: "******" is not allowed to impersonate " + realUser, ae.getMessage());
    }
    try {
      request = getMockRequest(user, user, realUser);
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad proxy request allowed");
    } catch (AuthorizationException ae) {
      Assert.assertEquals(
          "User: "******" is not allowed to impersonate " + realUser, ae.getMessage());
    }
  }
Example #26
  // Run Pig Locally
  public void runPigLocal(
      Map<String, String> params,
      String out,
      String tmp,
      final boolean quiet,
      final boolean silent,
      Configuration conf,
      String queue_name,
      String additional_jars,
      File pig_tmp,
      ArrayList<String> D_options,
      String PIG_DIR,
      FileSystem fs)
      throws IllegalArgumentException, IOException {
    // Create temp file on local to hold data to sort
    final File local_tmp = Files.createTempDir();
    local_tmp.deleteOnExit();

    Runtime.getRuntime()
        .addShutdownHook(
            new Thread(
                new Runnable() {
                  @Override
                  public void run() {
                    try {
                      logConsole(quiet, silent, warn, "Deleting tmp files in local tmp");
                      delete(local_tmp);
                    } catch (IOException e) {
                      // TODO Auto-generated catch block
                      e.printStackTrace();
                    }
                  }
                }));

    // Set input parameter for pig job
    params.put("tmpdir", local_tmp.toString() + "/" + tmp);

    // An out of '-' means write to stdout
    String pigout;
    if (out.equals("-")) {
      params.put("out", local_tmp + "/" + tmp + "/final");
      pigout = local_tmp + "/" + tmp + "/final";
    } else {
      params.put("out", local_tmp + "/" + StringEscapeUtils.escapeJava(out));
      pigout = StringEscapeUtils.escapeJava(out);
    }

    // Copy the tmp folder from HDFS to the local tmp directory, and delete the remote folder
    fs.copyToLocalFile(true, new Path(tmp), new Path(local_tmp + "/" + tmp));

    try {
      logConsole(quiet, silent, info, "Running PIG Command");
      conf.set("mapred.job.queue.name", queue_name);
      conf.set("pig.additional.jars", additional_jars);
      conf.set("pig.exec.reducers.bytes.per.reducer", Integer.toString(100 * 1000 * 1000));
      conf.set("pig.logfile", pig_tmp.toString());
      conf.set("hadoopversion", "23");
      // PIG temp directory set to be able to delete all temp files/directories
      conf.set("pig.temp.dir", local_tmp.getAbsolutePath());

      // Setting output separator for logdriver
      String DEFAULT_OUTPUT_SEPARATOR = "\t";
      Charset UTF_8 = Charset.forName("UTF-8");
      String outputSeparator =
          conf.get("logdriver.output.field.separator", DEFAULT_OUTPUT_SEPARATOR);
      byte[] bytes = outputSeparator.getBytes(UTF_8);
      if (bytes.length != 1) {
        System.err.println(
            ";******************** The output separator must be a single byte in UTF-8. ******************** ");
        System.exit(1);
      }
      conf.set("logdriver.output.field.separator", Byte.toString(bytes[0]));

      dOpts(D_options, silent, out, conf);

      PigServer pigServer = new PigServer(ExecType.LOCAL, conf);
      UserGroupInformation.setConfiguration(new Configuration(false));
      pigServer.registerScript(PIG_DIR + "/formatAndSortLocal.pg", params);
    } catch (Exception e) {
      e.printStackTrace();
      System.exit(1);
    }

    logConsole(quiet, silent, warn, "PIG Job Completed.");

    if (out.equals("-")) {
      System.out.println(";#################### DATA RESULTS ####################");
      try {
        File results = new File(pigout);
        String[] resultList = results.list();

        // Find the files in the directory, open and printout results
        for (int i = 0; i < resultList.length; i++) {
          if (resultList[i].contains("part-") && !resultList[i].contains(".crc")) {
            BufferedReader br =
                new BufferedReader(new FileReader(new File(pigout + "/" + resultList[i])));
            String line;
            line = br.readLine();
            while (line != null) {
              System.out.println(line);
              line = br.readLine();
            }
            br.close();
          }
        }
        System.out.println(";#################### END OF RESULTS ####################");
      } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
      }
    } else {
      fs.copyFromLocalFile(
          new Path(local_tmp + "/" + StringEscapeUtils.escapeJava(out)), new Path(pigout));
      System.out.println(
          ";#################### Done. Search results are in " + pigout + " ####################");
    }
  }
Example #27
  private void validateHadoopFS(List<ConfigIssue> issues) {
    boolean validHadoopFsUri = true;
    hadoopConf = getHadoopConfiguration(issues);
    String hdfsUriInConf;
    if (hdfsUri != null && !hdfsUri.isEmpty()) {
      hadoopConf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, hdfsUri);
    } else {
      hdfsUriInConf = hadoopConf.get(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY);
      if (hdfsUriInConf == null) {
        issues.add(
            getContext().createConfigIssue(Groups.HADOOP_FS.name(), "hdfsUri", Errors.HADOOPFS_19));
        return;
      } else {
        hdfsUri = hdfsUriInConf;
      }
    }
    if (hdfsUri.contains("://")) {
      try {
        URI uri = new URI(hdfsUri);
        if (!"hdfs".equals(uri.getScheme())) {
          issues.add(
              getContext()
                  .createConfigIssue(
                      Groups.HADOOP_FS.name(),
                      "hdfsUri",
                      Errors.HADOOPFS_12,
                      hdfsUri,
                      uri.getScheme()));
          validHadoopFsUri = false;
        } else if (uri.getAuthority() == null) {
          issues.add(
              getContext()
                  .createConfigIssue(
                      Groups.HADOOP_FS.name(), "hdfsUri", Errors.HADOOPFS_13, hdfsUri));
          validHadoopFsUri = false;
        }
      } catch (Exception ex) {
        issues.add(
            getContext()
                .createConfigIssue(
                    Groups.HADOOP_FS.name(),
                    "hdfsUri",
                    Errors.HADOOPFS_22,
                    hdfsUri,
                    ex.getMessage(),
                    ex));
        validHadoopFsUri = false;
      }
    } else {
      issues.add(
          getContext()
              .createConfigIssue(Groups.HADOOP_FS.name(), "hdfsUri", Errors.HADOOPFS_02, hdfsUri));
      validHadoopFsUri = false;
    }

    StringBuilder logMessage = new StringBuilder();
    try {
      // forcing UGI to initialize with the security settings from the stage
      UserGroupInformation.setConfiguration(hadoopConf);
      Subject subject = Subject.getSubject(AccessController.getContext());
      if (UserGroupInformation.isSecurityEnabled()) {
        loginUgi = UserGroupInformation.getUGIFromSubject(subject);
      } else {
        UserGroupInformation.loginUserFromSubject(subject);
        loginUgi = UserGroupInformation.getLoginUser();
      }
      LOG.info(
          "Subject = {}, Principals = {}, Login UGI = {}",
          subject,
          subject == null ? "null" : subject.getPrincipals(),
          loginUgi);
      if (hdfsKerberos) {
        logMessage.append("Using Kerberos");
        if (loginUgi.getAuthenticationMethod()
            != UserGroupInformation.AuthenticationMethod.KERBEROS) {
          issues.add(
              getContext()
                  .createConfigIssue(
                      Groups.HADOOP_FS.name(),
                      "hdfsKerberos",
                      Errors.HADOOPFS_00,
                      loginUgi.getAuthenticationMethod(),
                      UserGroupInformation.AuthenticationMethod.KERBEROS));
        }
      } else {
        logMessage.append("Using Simple");
        hadoopConf.set(
            CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
            UserGroupInformation.AuthenticationMethod.SIMPLE.name());
      }
      if (validHadoopFsUri) {
        getUGI()
            .doAs(
                new PrivilegedExceptionAction<Void>() {
                  @Override
                  public Void run() throws Exception {
                    try (FileSystem fs = getFileSystemForInitDestroy()) { // to trigger the close
                    }
                    return null;
                  }
                });
      }
    } catch (Exception ex) {
      LOG.info("Error connecting to FileSystem: " + ex, ex);
      issues.add(
          getContext()
              .createConfigIssue(
                  Groups.HADOOP_FS.name(),
                  null,
                  Errors.HADOOPFS_11,
                  hdfsUri,
                  String.valueOf(ex),
                  ex));
    }
    LOG.info("Authentication Config: " + logMessage);
  }
Example #28
  @Test(timeout = 60000)
  public void testSecureProxyAuthParamsInUrl() throws IOException {
    Configuration conf = new Configuration();
    // fake turning on security so api thinks it should use tokens
    SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
    UserGroupInformation.setConfiguration(conf);

    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
    ugi.setAuthenticationMethod(KERBEROS);
    ugi = UserGroupInformation.createProxyUser("test-proxy-user", ugi);
    UserGroupInformation.setLoginUser(ugi);

    WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
    Path fsPath = new Path("/");
    String tokenString = webhdfs.getDelegationToken().encodeToUrlString();

    // send real+effective
    URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
    checkQueryParams(
        new String[] {
          GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString()
        },
        getTokenUrl);

    // send real+effective
    URL renewTokenUrl =
        webhdfs.toUrl(
            PutOpParam.Op.RENEWDELEGATIONTOKEN, fsPath, new TokenArgumentParam(tokenString));
    checkQueryParams(
        new String[] {
          PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
        },
        renewTokenUrl);

    // send token
    URL cancelTokenUrl =
        webhdfs.toUrl(
            PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath, new TokenArgumentParam(tokenString));
    checkQueryParams(
        new String[] {
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
        },
        cancelTokenUrl);

    // send token
    URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
    checkQueryParams(
        new String[] {
          GetOpParam.Op.GETFILESTATUS.toQueryString(), new DelegationParam(tokenString).toString()
        },
        fileStatusUrl);

    // wipe out internal token to simulate auth always required
    webhdfs.setDelegationToken(null);

    // send real+effective
    cancelTokenUrl =
        webhdfs.toUrl(
            PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath, new TokenArgumentParam(tokenString));
    checkQueryParams(
        new String[] {
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString()
        },
        cancelTokenUrl);

    // send real+effective
    fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
    checkQueryParams(
        new String[] {
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString()
        },
        fileStatusUrl);
  }
Example #29
 @Before
 public void resetUGI() {
   UserGroupInformation.setConfiguration(new Configuration());
 }
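
Resetting with a fresh Configuration works because hadoop.security.authentication defaults to "simple" when nothing overrides it. A sketch of that assumption as a test (hypothetical class name; assumes no secure core-site.xml on the test classpath):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.Assert;
import org.junit.Test;

public class ResetUgiSketchTest {
  @Test
  public void freshConfigurationDisablesSecurity() {
    UserGroupInformation.setConfiguration(new Configuration());
    Assert.assertFalse(UserGroupInformation.isSecurityEnabled());
  }
}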
Example #30
  @Override
  public void loadFileSystem(
      String host,
      String port,
      String username,
      boolean isHA,
      boolean isMapR,
      List<HdfsPair> parameters,
      String connectionName,
      String chorusUsername) {
    loadHadoopClassLoader();
    Configuration config = new Configuration();

    config.set("fs.defaultFS", buildHdfsPath(host, port, isHA));
    config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

    if (config.get("hadoop.security.authentication", "simple").equalsIgnoreCase("simple")) {
      config.set("ipc.client.fallback-to-simple-auth-allowed", "true");
    }

    if (parameters != null && parameters.size() > 0) {
      for (HdfsPair pair : parameters) {
        config.set(pair.getKey(), pair.getValue());
      }
    }

    UserGroupInformation.setConfiguration(config);

    try {
      if (isKerberos(config)) {
        SecurityInfo securityInfo = new AnnotatedSecurityInfo();
        SecurityUtil.setSecurityInfoProviders(securityInfo);
        UserGroupInformation ugi =
            HdfsSecurityUtil.getCachedUserGroupInfo(
                connectionName, host, config.get(HdfsSecurityUtil.ALPINE_PRINCIPAL));
        if (ugi == null) {
          ugi =
              HdfsSecurityUtil.kerberosInitForHDFS(
                  config, host, port, connectionName, isHA, isMapR);
        }
        UserGroupInformation proxyUGI =
            HdfsSecurityUtil.createProxyUser(
                (chorusUsername == null || chorusUsername.isEmpty() ? username : chorusUsername),
                ugi);
        fileSystem =
            HdfsSecurityUtil.getHadoopFileSystem(
                config, proxyUGI, host, port, connectionName, isHA, isMapR);
        if (!loadedSuccessfully()) {
          if (checkForExpiredTicket()) {
            ugi =
                HdfsSecurityUtil.kerberosInitForHDFS(
                    config, host, port, connectionName, isHA, isMapR);
            proxyUGI =
                HdfsSecurityUtil.createProxyUser(
                    (chorusUsername == null || chorusUsername.isEmpty()
                        ? username
                        : chorusUsername),
                    ugi);
            fileSystem =
                HdfsSecurityUtil.getHadoopFileSystem(
                    config, proxyUGI, host, port, connectionName, isHA, isMapR);
          }
        }
      } else {
        fileSystem = FileSystem.get(FileSystem.getDefaultUri(config), config, username);
      }

    } catch (Exception e) {
      System.err.println("V3 plugin failed FileSystem.get");
      System.err.println(e.getMessage());
      e.printStackTrace(System.err);
    } finally {
      restoreOriginalClassLoader();
    }
  }