protected void batchUpdate(DeleteBuffer kvBuff, boolean flushCommits) throws HiveException {
    try {
        HTable htable = HTableFactory.getHTable(configMap);
        // Disable auto-flush when the config map says so
        if (disableAutoFlush)
            htable.setAutoFlushTo(false);
        // Override the write buffer size when the config map says so
        if (writeBufferSizeBytes > 0)
            htable.setWriteBufferSize(writeBufferSizeBytes);

        numDeleteRecords += kvBuff.deleteList.size();
        if (kvBuff.deleteList.size() > 0)
            LOG.info(" Doing Batch Delete " + kvBuff.deleteList.size()
                + " records; Total delete records = " + numDeleteRecords
                + " ; Start = " + (new String(kvBuff.deleteList.get(0).getRow()))
                + " ; End = " + (new String(kvBuff.deleteList.get(kvBuff.deleteList.size() - 1).getRow())));
        else
            LOG.info(" Doing Batch Delete with ZERO records");

        getReporter()
            .getCounter(BatchDeleteUDAFCounter.NUMBER_OF_SUCCESSFUL_DELETES)
            .increment(kvBuff.deleteList.size());
        getReporter().getCounter(BatchDeleteUDAFCounter.NUMBER_OF_BATCH_OPERATIONS).increment(1);

        // Submit the buffered deletes as one batch, then flush any pending mutations
        htable.delete(kvBuff.deleteList);
        if (flushCommits)
            htable.flushCommits();
        kvBuff.deleteList.clear();
    } catch (IOException e) {
        throw new HiveException(e);
    }
}
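// For reference, a minimal standalone sketch of the disable-auto-flush /
// batch / flushCommits pattern the method above relies on, against the
// 0.98-era HTable API. The table name "error_log" and the row keys are
// illustrative assumptions, not taken from the method above. Note that in
// this API the client-side write buffer holds Puts; delete(List) is sent
// to the server as a single batch call.
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchDeleteSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable htable = new HTable(conf, "error_log");   // assumed table name
        try {
            htable.setAutoFlushTo(false);                // buffer Puts client-side
            htable.setWriteBufferSize(4 * 1024 * 1024);  // 4 MB, arbitrary for the sketch

            List<Delete> deletes = new ArrayList<Delete>();
            for (int i = 0; i < 1000; i++) {
                deletes.add(new Delete(Bytes.toBytes("row_" + i)));
            }
            htable.delete(deletes);  // deletes go out as one batch
            htable.flushCommits();   // push anything still in the write buffer
        } finally {
            htable.close();
        }
    }
}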
@Test
public void insert_rowkey_prefix_date() throws IOException {
    final int ROW_COUNT = 10000000;
    // Buffer puts client-side; flushCommits() pushes them to the server in batches
    errorTable.setAutoFlushTo(false);
    long t1 = System.currentTimeMillis();
    for (int i = 0; i < ROW_COUNT; i++) {
        // Row key: fixed date prefix plus a short random suffix
        String uuid = UUID.randomUUID().toString().replaceAll("-", "").substring(0, 8);
        Put put = new Put(Bytes.toBytes("20150705" + "_" + uuid));
        put.add(fBytes, Bytes.toBytes("stacktrace"),
            Bytes.toBytes("java.io.IOException:file not found" + UUID.randomUUID().toString()));
        errorTable.put(put);
        // Flush every 10,000 puts to bound client-side memory use
        if (i % 10000 == 0) {
            errorTable.flushCommits();
        }
    }
    errorTable.flushCommits();
    long t2 = System.currentTimeMillis();
    System.out.println("count=" + ROW_COUNT + ",t2-t1=" + (t2 - t1));
}
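// The test above assumes errorTable and fBytes are fields initialized
// elsewhere in the test class. A possible harness, sketched under the
// assumption of a table named "error_log" with a column family "f" (the
// field names come from the test; the values are guesses):
import java.io.IOException;
import java.util.UUID;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class ErrorTablePutTest {
    private HTable errorTable;
    private final byte[] fBytes = Bytes.toBytes("f"); // assumed column family

    @Before
    public void setUp() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        errorTable = new HTable(conf, "error_log");   // assumed table name
    }

    @After
    public void tearDown() throws IOException {
        // close() also flushes any puts still sitting in the write buffer
        errorTable.close();
    }

    // insert_rowkey_prefix_date() above would live in this class
}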