Example #1
    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
      super.init(m, parameters);
      // Initialize the output object inspectors.
      // The input will be the key, the value, and the batch size.
      LOG.info(" Init mode = " + m);
      System.out.println(" Init mode = " + m);
      System.out.println(" parameters = " + parameters + " ; length = " + parameters.length);
      configMap = new HashMap<String, String>();
      for (int k = 0; k < parameters.length; ++k) {
        LOG.info("Param " + k + " is " + parameters[k]);
        System.out.println("Param " + k + " is " + parameters[k]);
      }

      if (m == Mode.PARTIAL1 || m == Mode.COMPLETE) {
        configMap = HTableFactory.getConfigFromConstMapInspector(parameters[0]);
        checkConfig(configMap);

        inputKeyOI = (PrimitiveObjectInspector) parameters[1];

        try {
          LOG.info(" Initializing HTable ");
          table = HTableFactory.getHTable(configMap);

          if (configMap.containsKey(BATCH_SIZE_TAG)) {
            batchSize = Integer.parseInt(configMap.get(BATCH_SIZE_TAG));
          }

          if (configMap.containsKey(DISABLE_AUTO_FLUSH)) {
            disableAutoFlush = Boolean.valueOf(configMap.get(DISABLE_AUTO_FLUSH));
            LOG.info("Disabling auto flush on hbase deletes");
          }

          //          if (configMap.containsKey(DISABLE_WAL)) {
          //            disableWAL = Boolean.valueOf(configMap.get(DISABLE_WAL));
          //            LOG.info("Disabling WAL writes on hbase deletes");
          //          }
          //
          //          if (configMap.containsKey(WRITE_BUFFER_SIZE_MB)) {
          //            writeBufferSizeBytes =
          //                Integer.parseInt(configMap.get(WRITE_BUFFER_SIZE_MB)) * 1024 * 1024;
          //            LOG.info("Setting hbase write buffer size to: " + writeBufferSizeBytes);
          //          }
        } catch (IOException e) {
          throw new HiveException(e);
        }

      } else {
        listKVOI = (StandardListObjectInspector) parameters[0];
      }

      if (m == Mode.PARTIAL1 || m == Mode.PARTIAL2) {
        return ObjectInspectorFactory.getStandardListObjectInspector(
            ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector));
      } else {
        // Otherwise, return a string message.
        return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
      }
    }
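A minimal, self-contained sketch of the config handling that init() performs above: optional settings are read out of a String-to-String map with parse-and-fallback. The key strings and default values below are assumptions for illustration, not the UDAF's actual constants.

    import java.util.HashMap;
    import java.util.Map;

    // Standalone sketch of the optional-setting parsing done in init().
    // The key names and defaults are assumed, not taken from the real class.
    public class ConfigSketch {
      static final String BATCH_SIZE_TAG = "batch_size";             // assumed key
      static final String DISABLE_AUTO_FLUSH = "disable_auto_flush"; // assumed key

      public static void main(String[] args) {
        Map<String, String> configMap = new HashMap<String, String>();
        configMap.put(BATCH_SIZE_TAG, "5000");

        int batchSize = 10000;            // assumed default
        boolean disableAutoFlush = false; // assumed default

        if (configMap.containsKey(BATCH_SIZE_TAG)) {
          batchSize = Integer.parseInt(configMap.get(BATCH_SIZE_TAG));
        }
        if (configMap.containsKey(DISABLE_AUTO_FLUSH)) {
          disableAutoFlush = Boolean.valueOf(configMap.get(DISABLE_AUTO_FLUSH));
        }

        System.out.println("batchSize = " + batchSize + " ; disableAutoFlush = " + disableAutoFlush);
      }
    }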
Example #2
    protected void batchUpdate(DeleteBuffer kvBuff, boolean flushCommits) throws HiveException {
      try {

        HTable htable = HTableFactory.getHTable(configMap);
        // Disable auto-flush when the config map says to
        if (disableAutoFlush) htable.setAutoFlushTo(false);

        // Override the write buffer size when the config map specifies one
        if (writeBufferSizeBytes > 0) htable.setWriteBufferSize(writeBufferSizeBytes);
        System.out.println("deleting" + kvBuff.deleteList + "size" + kvBuff.deleteList.size());
        if (flushCommits) htable.flushCommits();
        numDeleteRecords += kvBuff.deleteList.size();
        if (kvBuff.deleteList.size() > 0) {
          LOG.info(
              " Doing Batch Delete "
                  + kvBuff.deleteList.size()
                  + " records; Total delete records = "
                  + numDeleteRecords
                  + " ; Start = "
                  + (new String(kvBuff.deleteList.get(0).getRow()))
                  + " ; End = "
                  + (new String(kvBuff.deleteList.get(kvBuff.deleteList.size() - 1).getRow())));
        } else {
          LOG.info(" Doing Batch Delete with zero records");
        }

        getReporter()
            .getCounter(BatchDeleteUDAFCounter.NUMBER_OF_SUCCESSFUL_DELETES)
            .increment(kvBuff.deleteList.size());
        getReporter().getCounter(BatchDeleteUDAFCounter.NUMBER_OF_BATCH_OPERATIONS).increment(1);
        htable.delete(kvBuff.deleteList);
        // Flush the client-side write buffer after the batch delete (not before it),
        // so that buffered deletes are actually pushed to the region servers.
        if (flushCommits) htable.flushCommits();
        kvBuff.deleteList.clear();
      } catch (IOException e) {
        throw new HiveException(e);
      }
    }
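batchUpdate() above is a buffer-then-flush routine: deletes accumulate in kvBuff.deleteList and go to HBase in a single htable.delete(...) call. A minimal sketch of that pattern, with a plain list standing in for DeleteBuffer and a println standing in for the HBase call (the class and method names here are hypothetical):

    import java.util.ArrayList;
    import java.util.List;

    // Sketch of the buffer-then-flush pattern behind batchUpdate():
    // keys accumulate in a list and are drained in one batch call.
    public class BatchSketch {
      private final List<byte[]> buffer = new ArrayList<byte[]>();
      private final int batchSize;
      private long totalRecords = 0;

      BatchSketch(int batchSize) {
        this.batchSize = batchSize;
      }

      void add(byte[] key) {
        buffer.add(key);
        if (buffer.size() >= batchSize) {
          flush();
        }
      }

      void flush() {
        totalRecords += buffer.size();
        // In the real code this is the htable.delete(deleteList) call.
        System.out.println("Batch of " + buffer.size() + " keys ; total = " + totalRecords);
        buffer.clear(); // clear only after the batch call has succeeded
      }

      public static void main(String[] args) {
        BatchSketch b = new BatchSketch(3);
        for (int i = 0; i < 7; i++) {
          b.add(("row" + i).getBytes());
        }
        b.flush(); // drain the remainder, as a final terminate() would
      }
    }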
Example #3
    @Override
    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
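      // Extract the row key (the second argument) as raw bytes via its object inspector.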
      byte[] key = HTableFactory.getByteArray(parameters[1], inputKeyOI);

      if (key != null) {
        DeleteBuffer kvBuff = (DeleteBuffer) agg;
        kvBuff.addKey(key);
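        // Flush a partial batch once the buffered deletes reach the configured batch size.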
        if (kvBuff.deleteList.size() >= batchSize) {
          batchUpdate(kvBuff, false);
        }
      } else {
        getReporter().getCounter(BatchDeleteUDAFCounter.NULL_KEY_DELETE_FAILURE).increment(1);
      }
    }
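iterate() runs once per input row: a null key only bumps a failure counter, while a non-null key is buffered and, at the batch threshold, triggers a partial flush via batchUpdate(kvBuff, false). A self-contained sketch of that control flow, using illustrative names rather than the UDAF's real API:

    import java.util.ArrayList;
    import java.util.List;

    // Sketch of the per-row flow in iterate(): skip-and-count null keys,
    // buffer the rest, flush whenever the batch threshold is reached.
    public class IterateSketch {
      public static void main(String[] args) {
        int batchSize = 2;
        List<byte[]> buffer = new ArrayList<byte[]>();
        long nullKeyFailures = 0;

        String[] rows = {"a", null, "b", "c", null, "d"};
        for (String row : rows) {
          if (row == null) {
            nullKeyFailures++; // the real code increments a Hadoop counter here
            continue;
          }
          buffer.add(row.getBytes());
          if (buffer.size() >= batchSize) {
            System.out.println("partial flush of " + buffer.size() + " keys");
            buffer.clear();
          }
        }
        System.out.println("final flush of " + buffer.size() + " keys ; null keys skipped = " + nullKeyFailures);
      }
    }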