/**
   * Sets the connector information needed to communicate with Accumulo in this job.
   *
   * <p><b>WARNING:</b> Some tokens, when serialized, divulge sensitive information in the
   * configuration as a means to pass the token to MapReduce tasks. This information is BASE64
   * encoded to provide a charset safe conversion to a string, but this conversion is not intended
   * to be secure. {@link PasswordToken} is one example that is insecure in this way; however,
   * {@link DelegationToken}s, acquired using
   * {@link SecurityOperations#getDelegationToken(DelegationTokenConfig)}, are not subject to this
   * concern.
   *
   * @param job the Hadoop job instance to be configured
   * @param principal a valid Accumulo user name (user must have Table.CREATE permission if {@link
   *     #setCreateTables(Job, boolean)} is set to true)
   * @param token the user's authentication token (for example, a {@link PasswordToken})
   * @since 1.5.0
   */
  public static void setConnectorInfo(Job job, String principal, AuthenticationToken token)
      throws AccumuloSecurityException {
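    // When given a KerberosToken, obtain a DelegationToken on the client side so the MapReduce
    // tasks can communicate with Accumulo without Kerberos credentials of their own.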
    if (token instanceof KerberosToken) {
      log.info("Received KerberosToken, attempting to fetch DelegationToken");
      try {
        Instance instance = getInstance(job);
        Connector conn = instance.getConnector(principal, token);
        token = conn.securityOperations().getDelegationToken(new DelegationTokenConfig());
      } catch (Exception e) {
        log.warn(
            "Failed to automatically obtain DelegationToken, Mappers/Reducers will likely fail to communicate with Accumulo",
            e);
      }
    }
    // DelegationTokens can be passed securely from user to task without serializing insecurely in
    // the configuration
    if (token instanceof DelegationTokenImpl) {
      DelegationTokenImpl delegationToken = (DelegationTokenImpl) token;

      // Convert it into a Hadoop Token
      AuthenticationTokenIdentifier identifier = delegationToken.getIdentifier();
      Token<AuthenticationTokenIdentifier> hadoopToken =
          new Token<>(
              identifier.getBytes(),
              delegationToken.getPassword(),
              identifier.getKind(),
              delegationToken.getServiceName());

      // Add the Hadoop Token to the Job so it gets serialized and passed along.
      job.getCredentials().addToken(hadoopToken.getService(), hadoopToken);
    }

    OutputConfigurator.setConnectorInfo(CLASS, job.getConfiguration(), principal, token);
  }
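
  /*
   * A minimal usage sketch (not part of the original class; the job name, user name, and password
   * below are hypothetical): a driver could configure the connector info like this.
   *
   *   Job job = Job.getInstance(new Configuration(), "accumulo-output-example");
   *   // A PasswordToken works, but it is BASE64-serialized into the configuration; on secure
   *   // clusters prefer a KerberosToken (traded for a DelegationToken above) instead.
   *   setConnectorInfo(job, "exampleUser", new PasswordToken("examplePassword"));
   */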

  /**
   * Sets all of the initial parameters needed in this class for connectivity out to Accumulo.
   *
   * @param context the Hadoop job context whose configuration supplies the Accumulo connection
   *     settings
   */
  private void initialize(JobContext context) {

    Configuration conf = context.getConfiguration();
    try {
      // resolve the output zoom level, falling back to the configuration when it was not set
      if (zoomLevel == -1) {
        zoomLevel = Integer.parseInt(conf.get(MrGeoAccumuloConstants.MRGEO_ACC_KEY_ZOOMLEVEL));
      }
      log.info("Working from zoom level = " + zoomLevel);

      table = conf.get(MrGeoAccumuloConstants.MRGEO_ACC_KEY_OUTPUT_TABLE);
      username = conf.get(MrGeoAccumuloConstants.MRGEO_ACC_KEY_USER);
      instanceName = conf.get(MrGeoAccumuloConstants.MRGEO_ACC_KEY_INSTANCE);
      zooKeepers = conf.get(MrGeoAccumuloConstants.MRGEO_ACC_KEY_ZOOKEEPERS);

      String pl = conf.get(MrGeoConstants.MRGEO_PROTECTION_LEVEL);
      if (pl != null) {
        colViz = new ColumnVisibility(pl);
      } else if (colViz == null) {
        vizStr = conf.get(MrGeoAccumuloConstants.MRGEO_ACC_KEY_VIZ);

        if (vizStr == null) {
          colViz = new ColumnVisibility();
        } else {
          colViz = new ColumnVisibility(vizStr);
        }
      }

      password = conf.get(MrGeoAccumuloConstants.MRGEO_ACC_KEY_PASSWORD);
      String isEnc = conf.get(MrGeoAccumuloConstants.MRGEO_ACC_KEY_PWENCODED64, "false");
      if (isEnc.equalsIgnoreCase("true")) {
        password = Base64Utils.decodeToString(password);
      }

      if (_innerFormat != null) {
        return;
      }

      _innerFormat = AccumuloOutputFormat.class.newInstance();
      AuthenticationToken token = new PasswordToken(password.getBytes());

      boolean connSet = ConfiguratorBase.isConnectorInfoSet(AccumuloOutputFormat.class, conf);
      if (!connSet) {
        // a Job instance is not always available here, so set the connector info directly on the
        // Configuration, the same way AccumuloOutputFormat does internally
        OutputConfigurator.setConnectorInfo(AccumuloOutputFormat.class, conf, username, token);
        ClientConfiguration cc = ClientConfiguration.loadDefault().withInstance(instanceName);
        cc.setProperty(ClientProperty.INSTANCE_ZK_HOST, zooKeepers);

        OutputConfigurator.setZooKeeperInstance(AccumuloOutputFormat.class, conf, cc);
        OutputConfigurator.setDefaultTableName(AccumuloOutputFormat.class, conf, table);
        OutputConfigurator.setCreateTables(AccumuloOutputFormat.class, conf, true);

        outputInfoSet = true;
      }
    } catch (InstantiationException | IllegalAccessException | AccumuloSecurityException
        | ClassNotFoundException | IOException e) {
      log.error("Failed to initialize Accumulo output format", e);
    }
  } // end initialize
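
  /*
   * A hedged sketch (assumption, not from the original source): the configuration keys that
   * initialize(...) reads could be populated by the job driver roughly like this; the literal
   * values are placeholders.
   *
   *   Configuration conf = job.getConfiguration();
   *   conf.set(MrGeoAccumuloConstants.MRGEO_ACC_KEY_ZOOMLEVEL, "10");
   *   conf.set(MrGeoAccumuloConstants.MRGEO_ACC_KEY_OUTPUT_TABLE, "exampleTable");
   *   conf.set(MrGeoAccumuloConstants.MRGEO_ACC_KEY_USER, "exampleUser");
   *   conf.set(MrGeoAccumuloConstants.MRGEO_ACC_KEY_INSTANCE, "exampleInstance");
   *   conf.set(MrGeoAccumuloConstants.MRGEO_ACC_KEY_ZOOKEEPERS, "zk1:2181,zk2:2181");
   *   conf.set(MrGeoAccumuloConstants.MRGEO_ACC_KEY_PASSWORD, "examplePassword");
   *   conf.set(MrGeoAccumuloConstants.MRGEO_ACC_KEY_PWENCODED64, "false");
   */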

  /**
   * Sets the connector information needed to communicate with Accumulo in this job.
   *
   * <p>Stores the password in a file in HDFS and pulls that into the Distributed Cache in an
   * attempt to be more secure than storing it in the Configuration.
   *
   * @param job the Hadoop job instance to be configured
   * @param principal a valid Accumulo user name (user must have Table.CREATE permission if {@link
   *     #setCreateTables(Job, boolean)} is set to true)
   * @param tokenFile the path to the token file in HDFS
   * @since 1.6.0
   */
  public static void setConnectorInfo(Job job, String principal, String tokenFile)
      throws AccumuloSecurityException {
    OutputConfigurator.setConnectorInfo(CLASS, job.getConfiguration(), principal, tokenFile);
  }
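
  /*
   * A minimal usage sketch (assumption; the user name and path below are hypothetical): the
   * token-file variant points at a file, typically in HDFS, containing the serialized token.
   *
   *   setConnectorInfo(job, "exampleUser", "/user/exampleUser/example-token-file");
   */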