  /**
   * Tests backward compatibility. The umask can be configured either with the old key
   * dfs.umask, which takes a decimal umask, or with the new key dfs.umaskmode, which
   * takes a symbolic or octal umask.
   */
  public void testBackwardCompatibility() {
    // Test 1 - old configuration key with decimal
    // umask value should be handled when set using
    // the FsPermission.setUMask() API
    FsPermission perm = new FsPermission((short) 18);
    Configuration conf = new Configuration();
    FsPermission.setUMask(conf, perm);
    assertEquals(18, FsPermission.getUMask(conf).toShort());

    // Test 2 - old configuration key set with decimal
    // umask value should be handled
    perm = new FsPermission((short) 18);
    conf = new Configuration();
    conf.set(FsPermission.DEPRECATED_UMASK_LABEL, "18");
    assertEquals(18, FsPermission.getUMask(conf).toShort());

    // Test 3 - old configuration key overrides the new one
    conf = new Configuration();
    conf.set(FsPermission.DEPRECATED_UMASK_LABEL, "18");
    conf.set(FsPermission.UMASK_LABEL, "000");
    assertEquals(18, FsPermission.getUMask(conf).toShort());

    // Test 4 - new configuration key is handled
    conf = new Configuration();
    conf.set(FsPermission.UMASK_LABEL, "022");
    assertEquals(18, FsPermission.getUMask(conf).toShort());
  }
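
The javadoc above also mentions symbolic umasks for the new key, which the tests do not exercise. A minimal hedged sketch of that case, assuming the symbolic form "u=rwx,g=r-x,o=r-x" is accepted by the new key and parses to the same value as octal 022 (decimal 18):

  // Hedged sketch, not part of the original test: symbolic umask via the new key.
  public void testSymbolicUmaskSketch() {
    Configuration conf = new Configuration();
    // Assumption: "u=rwx,g=r-x,o=r-x" is equivalent to the octal umask 022 (decimal 18)
    conf.set(FsPermission.UMASK_LABEL, "u=rwx,g=r-x,o=r-x");
    assertEquals(18, FsPermission.getUMask(conf).toShort());
  }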
Example #2
/**
 * {@link HadoopFile} implementation for the HDFS protocol.
 *
 * @author Maxence Bernard
 */
public class HDFSFile extends HadoopFile {

  // TODO: allow a custom group to be set (see TODO below)
  //    /** Name of the property holding the file's group */
  //    public static final String GROUP_PROPERTY_NAME = "group";

  /** Default username */
  private static String DEFAULT_USERNAME;

  /** Default group */
  private static String DEFAULT_GROUP;

  /**
   * Default file permissions: Hadoop's default permission with the configured umask applied,
   * restricted to the standard 9 permission bits.
   */
  private static final FilePermissions DEFAULT_PERMISSIONS =
      new SimpleFilePermissions(
          FsPermission.getDefault()
                  .applyUMask(FsPermission.getUMask(DEFAULT_CONFIGURATION))
                  .toShort()
              & PermissionBits.FULL_PERMISSION_INT);

  static {
    try {
      UnixUserGroupInformation ugi = UnixUserGroupInformation.login(DEFAULT_CONFIGURATION);
      DEFAULT_USERNAME = ugi.getUserName();
      // Do not use default groups, as these are pretty much useless
    } catch (Exception e) {
      // Should never happen but default to a reasonable value if it does
      DEFAULT_USERNAME = System.getProperty("user.name");
    }

    DEFAULT_GROUP = DEFAULT_CONFIGURATION.get("dfs.permissions.supergroup", "supergroup");
  }

  protected HDFSFile(FileURL url) throws IOException {
    super(url);
  }

  protected HDFSFile(FileURL url, FileSystem fs, FileStatus fileStatus) throws IOException {
    super(url, fs, fileStatus);
  }

  public static String getDefaultUsername() {
    return DEFAULT_USERNAME;
  }

  public static String getDefaultGroup() {
    return DEFAULT_GROUP;
  }

  private static String getUsername(FileURL url) {
    Credentials credentials = url.getCredentials();
    String username;
    if (credentials == null || (username = credentials.getLogin()).equals(""))
      username = getDefaultUsername();

    return username;
  }

  private static String getGroup(FileURL url) {
    //        // Import the group from the URL's 'group' property, if set
    //        String group = url.getProperty(GROUP_PROPERTY_NAME);
    //        if(group==null || group.equals(""))
    //            group = getDefaultGroup();
    //
    //        return group;

    return getDefaultGroup();
  }

  ///////////////////////////////
  // HadoopFile implementation //
  ///////////////////////////////

  @Override
  protected FileSystem getHadoopFileSystem(FileURL url) throws IOException {
    // Note: getRealm returns a fresh instance every time
    FileURL realm = url.getRealm();

    Configuration conf = new Configuration();

    // Import the user from the URL's authority, if set
    // TODO: for some reason, setting the group has no effect: files are still created with the
    // default supergroup
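    // Note (assumption, not in the original comment): with the pre-security Hadoop UGI,
    // UnixUserGroupInformation.UGI_PROPERTY_NAME holds a comma-separated list consisting of
    // the username followed by its group names, which is what setStrings() produces here.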
    conf.setStrings(UnixUserGroupInformation.UGI_PROPERTY_NAME, getUsername(url), getGroup(url));

    return FileSystem.get(URI.create(realm.toString(false)), conf);
  }

  @Override
  protected void setDefaultFileAttributes(FileURL url, HadoopFileAttributes atts) {
    atts.setOwner(getUsername(url));
    atts.setGroup(getGroup(url));
    atts.setPermissions(DEFAULT_PERMISSIONS);
  }
}