Code example #1
 @Test
 public void testUGIAuthMethodInRealUser() throws Exception {
   final UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
   UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser("proxy", ugi);
   final AuthenticationMethod am = AuthenticationMethod.KERBEROS;
   ugi.setAuthenticationMethod(am);
   Assert.assertEquals(am, ugi.getAuthenticationMethod());
   Assert.assertEquals(AuthenticationMethod.PROXY, proxyUgi.getAuthenticationMethod());
   proxyUgi.doAs(
       new PrivilegedExceptionAction<Object>() {
         public Object run() throws IOException {
           Assert.assertEquals(
               AuthenticationMethod.PROXY,
               UserGroupInformation.getCurrentUser().getAuthenticationMethod());
           Assert.assertEquals(
               am, UserGroupInformation.getCurrentUser().getRealUser().getAuthenticationMethod());
           return null;
         }
       });
   UserGroupInformation proxyUgi2 = new UserGroupInformation(proxyUgi.getSubject());
   proxyUgi2.setAuthenticationMethod(AuthenticationMethod.PROXY);
   Assert.assertEquals(proxyUgi, proxyUgi2);
   // Equality should work if authMethod is null
   UserGroupInformation realugi = UserGroupInformation.getCurrentUser();
   UserGroupInformation proxyUgi3 = UserGroupInformation.createProxyUser("proxyAnother", realugi);
   UserGroupInformation proxyUgi4 = new UserGroupInformation(proxyUgi3.getSubject());
   Assert.assertEquals(proxyUgi3, proxyUgi4);
 }
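All of the examples on this page share one core pattern: wrap a real (login) user in a proxy UGI via createProxyUser, then run the privileged work inside doAs. A minimal sketch of that pattern, assuming a login user that is allowed to impersonate the placeholder user "alice":

 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import org.apache.hadoop.security.UserGroupInformation;

 UserGroupInformation realUser = UserGroupInformation.getLoginUser();
 UserGroupInformation proxy = UserGroupInformation.createProxyUser("alice", realUser);
 proxy.doAs(
     new PrivilegedExceptionAction<Void>() {
       @Override
       public Void run() throws IOException {
         // Executes with "alice" as the effective user and realUser as the real user.
         return null;
       }
     });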
Code example #2
  /**
   * Getter for proxiedFs, using the passed parameters to create an instance of a proxiedFs.
   *
   * @param properties State that must contain the name of the user to proxy as.
   * @param authType is either TOKEN or KEYTAB.
   * @param authPath is the KEYTAB location if the authType is KEYTAB; otherwise, it is the token
   *     file.
   * @param uri File system URI.
   * @throws IOException
   * @throws InterruptedException
   * @throws URISyntaxException
   * @return proxiedFs
   */
  public FileSystem getProxiedFileSystem(
      State properties, AuthType authType, String authPath, String uri)
      throws IOException, InterruptedException, URISyntaxException {
    Preconditions.checkArgument(
        StringUtils.isNotBlank(properties.getProp(ConfigurationKeys.FS_PROXY_AS_USER_NAME)),
        "State does not contain a proper proxy user name");
    String proxyUserName = properties.getProp(ConfigurationKeys.FS_PROXY_AS_USER_NAME);
    UserGroupInformation proxyUser;
    switch (authType) {
      case KEYTAB: // If the authentication type is KEYTAB, log in a super user first before
                   // creating a proxy user.
         Preconditions.checkArgument(
             StringUtils.isNotBlank(
                 properties.getProp(ConfigurationKeys.SUPER_USER_NAME_TO_PROXY_AS_OTHERS)),
             "State does not contain a proper super user name");
        String superUser = properties.getProp(ConfigurationKeys.SUPER_USER_NAME_TO_PROXY_AS_OTHERS);
        UserGroupInformation.loginUserFromKeytab(superUser, authPath);
        proxyUser =
            UserGroupInformation.createProxyUser(
                proxyUserName, UserGroupInformation.getLoginUser());
        break;
      case TOKEN: // If the authentication type is TOKEN, create a proxy user and then add the token
                  // to the user.
        proxyUser =
            UserGroupInformation.createProxyUser(
                proxyUserName, UserGroupInformation.getLoginUser());
        Optional<Token> proxyToken = this.getTokenFromSeqFile(authPath, proxyUserName);
        if (proxyToken.isPresent()) {
          proxyUser.addToken(proxyToken.get());
        } else {
          LOG.warn("No delegation token found for the current proxy user.");
        }
        break;
      default:
         LOG.warn(
             "Creating a proxy user without authentication; it may not be able to perform file system operations.");
        proxyUser =
            UserGroupInformation.createProxyUser(
                proxyUserName, UserGroupInformation.getLoginUser());
        break;
    }

    final Configuration conf = new Configuration();
    JobConfigurationUtils.putStateIntoConfiguration(properties, conf);
    final URI fsURI = URI.create(uri);
    proxyUser.doAs(
        new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws IOException {
            LOG.debug(
                "Now performing file system operations as :"
                    + UserGroupInformation.getCurrentUser());
            proxiedFs = FileSystem.get(fsURI, conf);
            return null;
          }
        });
    return this.proxiedFs;
  }
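A hypothetical call site for the getter above. State, AuthType, and ConfigurationKeys are the Gobblin types the snippet already uses; the user name, token path, and URI are placeholders:

  State state = new State();
  state.setProp(ConfigurationKeys.FS_PROXY_AS_USER_NAME, "alice");
  FileSystem fs =
      getProxiedFileSystem(state, AuthType.TOKEN, "/path/to/token.file", "hdfs://namenode:8020");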
Code example #3
File: BeeswaxServiceImpl.java Project: abayer/hue
 private <T> T doWithState(RunningQueryState state, PrivilegedExceptionAction<T> action)
     throws BeeswaxException {
   try {
     UserGroupInformation ugi;
      if (UserGroupInformation.isSecurityEnabled()) {
        ugi =
            UserGroupInformation.createProxyUser(
                state.query.hadoop_user, UserGroupInformation.getLoginUser());
      } else {
        ugi = UserGroupInformation.createRemoteUser(state.query.hadoop_user);
      }
     return ugi.doAs(action);
   } catch (UndeclaredThrowableException e) {
     if (e.getUndeclaredThrowable() instanceof PrivilegedActionException) {
       Throwable bwe = e.getUndeclaredThrowable().getCause();
       if (bwe instanceof BeeswaxException) {
         LOG.error("Caught BeeswaxException", (BeeswaxException) bwe);
         throw (BeeswaxException) bwe;
       }
     }
     LOG.error("Caught unexpected exception.", e);
     throw new BeeswaxException(e.getMessage(), state.handle.log_context, state.handle);
   } catch (IOException e) {
     LOG.error("Caught IOException", e);
     throw new BeeswaxException(e.getMessage(), state.handle.log_context, state.handle);
   } catch (InterruptedException e) {
     LOG.error("Caught InterruptedException", e);
     throw new BeeswaxException(e.getMessage(), state.handle.log_context, state.handle);
   }
 }
Code example #4
File: DistCp.java Project: civvy/spring-hadoop
  /**
   * Initiate a copy operation using a command-line style (arguments are specified as {@link
   * String}s).
   *
   * @param arguments the copy arguments
   */
  public void copy(String... arguments) {
    Assert.notEmpty(arguments, "invalid number of arguments");
    // sanitize the arguments
    final List<String> parsedArguments = new ArrayList<String>();
    for (String arg : arguments) {
      parsedArguments.addAll(Arrays.asList(StringUtils.tokenizeToStringArray(arg, " ")));
    }

    try {
      if (StringUtils.hasText(user)) {
        UserGroupInformation ugi =
            UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());
        ugi.doAs(
            new PrivilegedExceptionAction<Void>() {
              @Override
              public Void run() throws Exception {
                invokeCopy(
                    configuration, parsedArguments.toArray(new String[parsedArguments.size()]));
                return null;
              }
            });
      } else {
        invokeCopy(configuration, parsedArguments.toArray(new String[parsedArguments.size()]));
      }
    } catch (Exception ex) {
      throw new IllegalStateException("Cannot run distCp impersonated as '" + user + "'", ex);
    }
  }
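A usage sketch for copy, assuming a DistCp instance already wired with a Hadoop Configuration (and optionally a user to impersonate); the flag and URIs are placeholders in the standard distcp command-line style:

  distCp.copy("-overwrite", "hdfs://nn1:8020/src", "hdfs://nn2:8020/dst");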
Code example #5
  // Initialize the environment
  static {
    // Properties props = getProperties("hadoop.properties");

    conf = new Configuration();
    conf.addResource(new Path("F:\\hdfs-site.xml"));
    conf.addResource(new Path("F:\\hbase-site.xml"));
    conf.set("fs.defaultFS", "hdfs://ju51nn");
    try {
      String ugi = "";
      if (Strings.isNullOrEmpty(ugi)) {
        fs = FileSystem.get(conf);
        System.out.println("======" + fs);
      } else {
        UserGroupInformation.createProxyUser(ugi, UserGroupInformation.getLoginUser())
            .doAs(
                new PrivilegedExceptionAction<Void>() {
                  @Override
                  public Void run() throws Exception {
                    fs = FileSystem.get(conf);
                    return null;
                  }
                });
      }
    } catch (Exception e) {
      LOG.error("初始化FileSytem对象异常: ", e.getMessage());
      e.printStackTrace();
    }
  }
Code example #6
 @Test
 public void testEqualsWithRealUser() throws Exception {
   UserGroupInformation realUgi1 =
       UserGroupInformation.createUserForTesting("RealUser", GROUP_NAMES);
   UserGroupInformation realUgi2 =
       UserGroupInformation.createUserForTesting("RealUser", GROUP_NAMES);
   UserGroupInformation proxyUgi1 = UserGroupInformation.createProxyUser(USER_NAME, realUgi1);
   UserGroupInformation proxyUgi2 = new UserGroupInformation(proxyUgi1.getSubject());
   UserGroupInformation remoteUgi = UserGroupInformation.createRemoteUser(USER_NAME);
   assertEquals(proxyUgi1, proxyUgi2);
   assertFalse(remoteUgi.equals(proxyUgi1));
 }
Code example #7
 protected Token<?>[] obtainSystemTokensForUser(String user, final Credentials credentials)
     throws IOException, InterruptedException {
   // Get new hdfs tokens on behalf of this user
   UserGroupInformation proxyUser =
       UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());
   Token<?>[] newTokens =
       proxyUser.doAs(
           new PrivilegedExceptionAction<Token<?>[]>() {
             @Override
             public Token<?>[] run() throws Exception {
               return FileSystem.get(getConfig())
                   .addDelegationTokens(
                       UserGroupInformation.getLoginUser().getUserName(), credentials);
             }
           });
   return newTokens;
 }
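A sketch of calling the helper above from the same class. Credentials is org.apache.hadoop.security.Credentials and Token is org.apache.hadoop.security.token.Token; the user name is a placeholder:

  Credentials credentials = new Credentials();
  Token<?>[] tokens = obtainSystemTokensForUser("alice", credentials);
  for (Token<?> token : tokens) {
    LOG.info("Obtained token of kind " + token.getKind());
  }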
Code example #8
File: JspHelper.java Project: baggioss/hadoop-cdh3u5
 private static UserGroupInformation initUGI(
     final UserGroupInformation realUgi,
     final String doAsUserFromQuery,
     final HttpServletRequest request,
     final boolean isSecurityEnabled,
     final Configuration conf)
     throws AuthorizationException {
   final UserGroupInformation ugi;
   if (doAsUserFromQuery == null) {
     // non-proxy case
     ugi = realUgi;
   } else {
     // proxy case
     ugi = UserGroupInformation.createProxyUser(doAsUserFromQuery, realUgi);
     ugi.setAuthenticationMethod(
         isSecurityEnabled ? AuthenticationMethod.PROXY : AuthenticationMethod.SIMPLE);
     ProxyUsers.authorize(ugi, request.getRemoteAddr(), conf);
   }
   return ugi;
 }
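ProxyUsers.authorize in the proxy branch only succeeds if impersonation is enabled for the real user in the Hadoop configuration. A minimal sketch using the standard hadoop.proxyuser.* keys, where the superuser name "super" and the host/group values are assumptions:

  Configuration conf = new Configuration();
  // Hosts from which "super" may impersonate, and groups whose members may be impersonated.
  conf.set("hadoop.proxyuser.super.hosts", "gateway.example.com");
  conf.set("hadoop.proxyuser.super.groups", "analysts");
  ProxyUsers.refreshSuperUserGroupsConfiguration(conf);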
Code example #9
  @Test(timeout = 60000)
  public void testSimpleProxyAuthParamsInUrl() throws IOException {
    Configuration conf = new Configuration();

    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
    ugi = UserGroupInformation.createProxyUser("test-proxy-user", ugi);
    UserGroupInformation.setLoginUser(ugi);

    WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
    Path fsPath = new Path("/");

    // send real+effective
    URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
    checkQueryParams(
        new String[] {
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString()
        },
        fileStatusUrl);
  }
Code example #10
 private void testDelegationTokenWithinDoAs(final Class fileSystemClass, boolean proxyUser)
     throws Exception {
   Configuration conf = new Configuration();
   conf.set("hadoop.security.authentication", "kerberos");
   UserGroupInformation.setConfiguration(conf);
   UserGroupInformation.loginUserFromKeytab("client", "/Users/tucu/tucu.keytab");
   UserGroupInformation ugi = UserGroupInformation.getLoginUser();
   if (proxyUser) {
     ugi = UserGroupInformation.createProxyUser("foo", ugi);
   }
   conf = new Configuration();
   UserGroupInformation.setConfiguration(conf);
   ugi.doAs(
       new PrivilegedExceptionAction<Void>() {
         @Override
         public Void run() throws Exception {
           testDelegationTokenWithFS(fileSystemClass);
           return null;
         }
       });
 }
Code example #11
  @Test(timeout = 60000)
  public void testSecureProxyAuthParamsInUrl() throws IOException {
    Configuration conf = new Configuration();
    // fake turning on security so api thinks it should use tokens
    SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
    UserGroupInformation.setConfiguration(conf);

    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
    ugi.setAuthenticationMethod(KERBEROS);
    ugi = UserGroupInformation.createProxyUser("test-proxy-user", ugi);
    UserGroupInformation.setLoginUser(ugi);

    WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
    Path fsPath = new Path("/");
    String tokenString = webhdfs.getDelegationToken().encodeToUrlString();

    // send real+effective
    URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
    checkQueryParams(
        new String[] {
          GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString()
        },
        getTokenUrl);

    // send real+effective
    URL renewTokenUrl =
        webhdfs.toUrl(
            PutOpParam.Op.RENEWDELEGATIONTOKEN, fsPath, new TokenArgumentParam(tokenString));
    checkQueryParams(
        new String[] {
          PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
        },
        renewTokenUrl);

    // send token
    URL cancelTokenUrl =
        webhdfs.toUrl(
            PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath, new TokenArgumentParam(tokenString));
    checkQueryParams(
        new String[] {
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
        },
        cancelTokenUrl);

    // send token
    URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
    checkQueryParams(
        new String[] {
          GetOpParam.Op.GETFILESTATUS.toQueryString(), new DelegationParam(tokenString).toString()
        },
        fileStatusUrl);

    // wipe out internal token to simulate auth always required
    webhdfs.setDelegationToken(null);

    // send real+effective
    cancelTokenUrl =
        webhdfs.toUrl(
            PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath, new TokenArgumentParam(tokenString));
    checkQueryParams(
        new String[] {
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString()
        },
        cancelTokenUrl);

    // send real+effective
    fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
    checkQueryParams(
        new String[] {
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString()
        },
        fileStatusUrl);
  }
Code example #12
File: Worker.java Project: pmedishetty/hive
  // todo: this doesn't check if compaction is already running (even though Initiator does, but we
  // don't go through Initiator for user-initiated compactions)
  @Override
  public void run() {
    do {
      boolean launchedJob = false;
      // Make sure nothing escapes this run method and kills the metastore at large,
      // so wrap it in a big catch Throwable statement.
      try {
        final CompactionInfo ci = txnHandler.findNextToCompact(name);

        if (ci == null && !stop.get()) {
          try {
            Thread.sleep(SLEEP_TIME);
            continue;
          } catch (InterruptedException e) {
            LOG.warn("Worker thread sleep interrupted " + e.getMessage());
            continue;
          }
        }

        // Find the table we will be working with.
        Table t1 = null;
        try {
          t1 = resolveTable(ci);
          if (t1 == null) {
            LOG.info(
                "Unable to find table "
                    + ci.getFullTableName()
                    + ", assuming it was dropped and moving on.");
            txnHandler.markCleaned(ci);
            continue;
          }
        } catch (MetaException e) {
          txnHandler.markCleaned(ci);
          continue;
        }
        // This chicanery is to get around the fact that the table needs to be final in order to
        // go into the doAs below.
        final Table t = t1;

        // Find the partition we will be working with, if there is one.
        Partition p = null;
        try {
          p = resolvePartition(ci);
          if (p == null && ci.partName != null) {
            LOG.info(
                "Unable to find partition "
                    + ci.getFullPartitionName()
                    + ", assuming it was dropped and moving on.");
            txnHandler.markCleaned(ci);
            continue;
          }
        } catch (Exception e) {
          txnHandler.markCleaned(ci);
          continue;
        }

        // Find the appropriate storage descriptor
        final StorageDescriptor sd = resolveStorageDescriptor(t, p);

        // Check that the table or partition isn't sorted, as we don't yet support that.
        if (sd.getSortCols() != null && !sd.getSortCols().isEmpty()) {
          LOG.error("Attempt to compact sorted table, which is not yet supported!");
          txnHandler.markCleaned(ci);
          continue;
        }

        final boolean isMajor = ci.isMajorCompaction();
        final ValidTxnList txns =
            CompactionTxnHandler.createValidCompactTxnList(txnHandler.getOpenTxnsInfo());
        LOG.debug("ValidCompactTxnList: " + txns.writeToString());
        txnHandler.setCompactionHighestTxnId(ci, txns.getHighWatermark());
        final StringBuilder jobName = new StringBuilder(name);
        jobName.append("-compactor-");
        jobName.append(ci.getFullPartitionName());

        // Determine who to run as
        String runAs;
        if (ci.runAs == null) {
          runAs = findUserToRunAs(sd.getLocation(), t);
          txnHandler.setRunAs(ci.id, runAs);
        } else {
          runAs = ci.runAs;
        }

        LOG.info("Starting " + ci.type.toString() + " compaction for " + ci.getFullPartitionName());

        final StatsUpdater su =
            StatsUpdater.init(
                ci,
                txnHandler.findColumnsWithStats(ci),
                conf,
                runJobAsSelf(runAs) ? runAs : t.getOwner());
        final CompactorMR mr = new CompactorMR();
        launchedJob = true;
        try {
          if (runJobAsSelf(runAs)) {
            mr.run(conf, jobName.toString(), t, sd, txns, ci, su);
          } else {
            UserGroupInformation ugi =
                UserGroupInformation.createProxyUser(
                    t.getOwner(), UserGroupInformation.getLoginUser());
            ugi.doAs(
                new PrivilegedExceptionAction<Object>() {
                  @Override
                  public Object run() throws Exception {
                    mr.run(conf, jobName.toString(), t, sd, txns, ci, su);
                    return null;
                  }
                });
          }
          txnHandler.markCompacted(ci);
        } catch (Exception e) {
          LOG.error(
              "Caught exception while trying to compact "
                  + ci
                  + ".  Marking failed to avoid repeated failures, "
                  + StringUtils.stringifyException(e));
          txnHandler.markFailed(ci);
        }
      } catch (Throwable t) {
        LOG.error(
            "Caught an exception in the main loop of compactor worker "
                + name
                + ", "
                + StringUtils.stringifyException(t));
      }

      // If we didn't try to launch a job it either means there was no work to do or we got
      // here as the result of a communication failure with the DB.  Either way we want to wait
      // a bit before we restart the loop.
      if (!launchedJob && !stop.get()) {
        try {
          Thread.sleep(SLEEP_TIME);
        } catch (InterruptedException e) {
          // Interrupted while waiting between polls; the loop condition handles shutdown.
        }
      }
    } while (!stop.get());
  }
Code example #13
  @SuppressWarnings("rawtypes")
  public void afterPropertiesSet() throws Exception {
    final Configuration cfg = ConfigurationUtils.createFrom(configuration, properties);

    buildGenericOptions(cfg);

    if (StringUtils.hasText(user)) {
      UserGroupInformation ugi =
          UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());
      ugi.doAs(
          new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
              job = new Job(cfg);
              return null;
            }
          });
    } else {
      job = new Job(cfg);
    }

    ClassLoader loader =
        (beanClassLoader != null
            ? beanClassLoader
            : org.springframework.util.ClassUtils.getDefaultClassLoader());

    if (jar != null) {
      JobConf conf = (JobConf) job.getConfiguration();
      conf.setJar(jar.getURI().toString());
      loader = ExecutionUtils.createParentLastClassLoader(jar, beanClassLoader, cfg);
      conf.setClassLoader(loader);
    }

    // set the mapper/reducer first so the K/V types can be auto-detected rather than having to be specified explicitly
    if (mapper != null) {
      Class<? extends Mapper> mapperClass = resolveClass(mapper, loader, Mapper.class);
      job.setMapperClass(mapperClass);
      configureMapperTypesIfPossible(job, mapperClass);
    }

    if (reducer != null) {
      Class<? extends Reducer> reducerClass = resolveClass(reducer, loader, Reducer.class);
      job.setReducerClass(reducerClass);
      configureReducerTypesIfPossible(job, reducerClass);
    }

    if (StringUtils.hasText(name)) {
      job.setJobName(name);
    }
    if (combiner != null) {
      job.setCombinerClass(resolveClass(combiner, loader, Reducer.class));
    }
    if (groupingComparator != null) {
      job.setGroupingComparatorClass(resolveClass(groupingComparator, loader, RawComparator.class));
    }
    if (inputFormat != null) {
      job.setInputFormatClass(resolveClass(inputFormat, loader, InputFormat.class));
    }
    if (mapKey != null) {
      job.setMapOutputKeyClass(resolveClass(mapKey, loader, Object.class));
    }
    if (mapValue != null) {
      job.setMapOutputValueClass(resolveClass(mapValue, loader, Object.class));
    }
    if (numReduceTasks != null) {
      job.setNumReduceTasks(numReduceTasks);
    }
    if (key != null) {
      job.setOutputKeyClass(resolveClass(key, loader, Object.class));
    }
    if (value != null) {
      job.setOutputValueClass(resolveClass(value, loader, Object.class));
    }
    if (outputFormat != null) {
      job.setOutputFormatClass(resolveClass(outputFormat, loader, OutputFormat.class));
    }
    if (partitioner != null) {
      job.setPartitionerClass(resolveClass(partitioner, loader, Partitioner.class));
    }
    if (sortComparator != null) {
      job.setSortComparatorClass(resolveClass(sortComparator, loader, RawComparator.class));
    }
    if (StringUtils.hasText(workingDir)) {
      job.setWorkingDirectory(new Path(workingDir));
    }
    if (jarClass != null) {
      job.setJarByClass(jarClass);
    }

    if (!CollectionUtils.isEmpty(inputPaths)) {
      for (String path : inputPaths) {
        FileInputFormat.addInputPath(job, new Path(path));
      }
    }

    if (StringUtils.hasText(outputPath)) {
      FileOutputFormat.setOutputPath(job, new Path(outputPath));
    }

    if (compressOutput != null) {
      FileOutputFormat.setCompressOutput(job, compressOutput);
    }

    if (codecClass != null) {
      FileOutputFormat.setOutputCompressorClass(
          job, resolveClass(codecClass, loader, CompressionCodec.class));
    }

    processJob(job);
  }
Code example #14
 private UserGroupInformation getUGI() {
   return (hdfsUser == null || hdfsUser.isEmpty())
       ? loginUgi
       : UserGroupInformation.createProxyUser(hdfsUser, loginUgi);
 }
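A usage sketch for the getter above: run the file system work as whichever UGI it returns. The loginUgi and hdfsUser fields are the ones assumed by the snippet:

  FileSystem fs =
      getUGI().doAs(
          new PrivilegedExceptionAction<FileSystem>() {
            @Override
            public FileSystem run() throws IOException {
              return FileSystem.get(new Configuration());
            }
          });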