/**
 * Removes all stored state for the given application: the application's
 * own node plus one node per recorded attempt. All deletes are issued in
 * a single LevelDB write batch so they are applied atomically.
 *
 * @param appState the application state whose nodes should be removed
 * @throws IOException if the underlying LevelDB delete/write fails
 */
@Override
protected void removeApplicationStateInternal(ApplicationStateData appState)
    throws IOException {
  ApplicationId appId =
      appState.getApplicationSubmissionContext().getApplicationId();
  String appKey = getApplicationNodeKey(appId);
  try {
    // try-with-resources replaces the manual finally { batch.close(); }
    // and guarantees the batch is released even if db.write() throws.
    try (WriteBatch batch = db.createWriteBatch()) {
      batch.delete(bytes(appKey));
      // Each attempt has its own node keyed under the application node.
      for (ApplicationAttemptId attemptId : appState.attempts.keySet()) {
        String attemptKey = getApplicationAttemptNodeKey(appKey, attemptId);
        batch.delete(bytes(attemptKey));
      }
      if (LOG.isDebugEnabled()) {
        LOG.debug("Removing state for app " + appId + " and "
            + appState.attempts.size() + " attempts" + " at " + appKey);
      }
      db.write(batch);
    }
  } catch (DBException e) {
    // Surface LevelDB runtime failures under the method's checked contract.
    throw new IOException(e);
  }
}
/**
 * Stores or updates an RM delegation token together with its renew date.
 * On an initial store ({@code isUpdate == false}) the token's sequence
 * number is written in the same batch, so the token and the sequence
 * number commit atomically.
 *
 * @param tokenId the delegation token identifier to persist
 * @param renewDate the renew date stored alongside the token
 * @param isUpdate true when the token already exists and is being updated
 * @throws IOException if serialization or the LevelDB write fails
 */
private void storeOrUpdateRMDT(
    RMDelegationTokenIdentifier tokenId, Long renewDate,
    boolean isUpdate) throws IOException {
  String tokenKey = getRMDTTokenNodeKey(tokenId);
  RMDelegationTokenIdentifierData tokenData =
      new RMDelegationTokenIdentifierData(tokenId, renewDate);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Storing token to " + tokenKey);
  }
  try {
    // try-with-resources replaces the manual finally { batch.close(); },
    // matching the style already used for the DataOutputStream below.
    try (WriteBatch batch = db.createWriteBatch()) {
      batch.put(bytes(tokenKey), tokenData.toByteArray());
      if (!isUpdate) {
        // New token: also persist the current sequence number, encoded
        // as a big-endian int, in the same atomic batch.
        ByteArrayOutputStream bs = new ByteArrayOutputStream();
        try (DataOutputStream ds = new DataOutputStream(bs)) {
          ds.writeInt(tokenId.getSequenceNumber());
        }
        if (LOG.isDebugEnabled()) {
          LOG.debug("Storing " + tokenId.getSequenceNumber() + " to "
              + RM_DT_SEQUENCE_NUMBER_KEY);
        }
        batch.put(bytes(RM_DT_SEQUENCE_NUMBER_KEY), bs.toByteArray());
      }
      db.write(batch);
    }
  } catch (DBException e) {
    // Surface LevelDB runtime failures under the method's checked contract.
    throw new IOException(e);
  }
}
/**
 * Applies the given set of mutations — range removes, key/value puts, and
 * counter adjustments — to the underlying LevelDB database in one atomic
 * write batch.
 *
 * @param mutations the mutations to apply; must not be null
 * @param sync whether the LevelDB write is flushed synchronously
 * @throws IllegalArgumentException if {@code mutations} is null
 * @throws IllegalStateException if this store has been closed
 * @throws DBException if the batch write fails with an I/O error
 */
@Override
public synchronized void mutate(Mutations mutations, boolean sync) {
    Preconditions.checkArgument(mutations != null, "null mutations");
    Preconditions.checkState(this.db != null, "closed");

    // Apply mutations in a batch
    try (WriteBatch batch = this.db.createWriteBatch()) {

        // Apply removes. fillCache(false) so scanning keys purely to
        // delete them does not populate the LevelDB block cache.
        final ReadOptions iteratorOptions = new ReadOptions()
          .verifyChecksums(this.options.verifyChecksums())
          .fillCache(false);
        for (KeyRange range : mutations.getRemoveRanges()) {
            final byte[] min = range.getMin();
            final byte[] max = range.getMax();
            // NOTE(review): isConsecutive(min, max) presumably means max is
            // the immediate successor of min, i.e. the range holds exactly
            // one possible key, so a direct delete avoids an iterator —
            // confirm against ByteUtil.isConsecutive.
            if (min != null && max != null && ByteUtil.isConsecutive(min, max))
                batch.delete(min);
            else {
                // General case: iterate the [min, max) range and delete
                // every key found.
                try (LevelDBKVStore.Iterator i =
                      this.kv.createIterator(iteratorOptions, min, max, false)) {
                    while (i.hasNext())
                        batch.delete(i.next().getKey());
                }
            }
        }

        // Apply puts
        for (Map.Entry<byte[], byte[]> entry : mutations.getPutPairs())
            batch.put(entry.getKey(), entry.getValue());

        // Convert counter adjustments into puts by reading the current
        // counter value and re-encoding (old value + diff).
        final Function<Map.Entry<byte[], Long>, Map.Entry<byte[], byte[]>> counterPutFunction
          = new Function<Map.Entry<byte[], Long>, Map.Entry<byte[], byte[]>>() {
            @Override
            public Map.Entry<byte[], byte[]> apply(Map.Entry<byte[], Long> adjust) {

                // Decode old value; a missing key is treated as eight zero
                // bytes (presumably counter value zero — depends on
                // decodeCounter's encoding).
                final byte[] key = adjust.getKey();
                final long diff = adjust.getValue();
                byte[] oldBytes = LevelDBAtomicKVStore.this.kv.get(key);
                if (oldBytes == null)
                    oldBytes = new byte[8];
                final long oldValue;
                try {
                    oldValue = LevelDBAtomicKVStore.this.kv.decodeCounter(oldBytes);
                } catch (IllegalArgumentException e) {
                    // Existing value is not a valid counter encoding; return
                    // null so this adjustment is silently skipped below.
                    return null;
                }

                // Add adjustment and re-encode it
                return new AbstractMap.SimpleEntry<byte[], byte[]>(
                  key, LevelDBAtomicKVStore.this.kv.encodeCounter(oldValue + diff));
            }
        };

        // Apply counter adjustments (null entries are undecodable counters
        // and are skipped rather than written).
        for (Map.Entry<byte[], byte[]> entry
          : Iterables.transform(mutations.getAdjustPairs(), counterPutFunction)) {
            if (entry != null)
                batch.put(entry.getKey(), entry.getValue());
        }

        // Write the batch
        this.db.write(batch, new WriteOptions().sync(sync));
    } catch (IOException e) {
        throw new DBException("error applying changes to LevelDB", e);
    }
}