public void writeLogEntry(LogEntry logEntry) throws Throwable {
    // Build the column mutations that describe this log entry.
    List<Mutation> slice = new ArrayList<Mutation>();
    slice.add(getMutation(LogEntryColumns.KS.toString(), logEntry.getKeyspace()));
    slice.add(getMutation(LogEntryColumns.CF.toString(), logEntry.getColumnFamily()));
    slice.add(getMutation(LogEntryColumns.ROW.toString(), logEntry.getRowKey()));
    slice.add(getMutation(LogEntryColumns.STATUS.toString(), logEntry.getStatus().toString()));
    slice.add(getMutation(LogEntryColumns.TIMESTAMP.toString(),
            Long.toString(logEntry.getTimestamp())));
    slice.add(getMutation(LogEntryColumns.HOST.toString(), logEntry.getHost()));

    // Record any errors carried by the entry as additional columns.
    if (logEntry.hasErrors()) {
        for (String errorKey : logEntry.getErrors().keySet()) {
            slice.add(getMutation(errorKey, logEntry.getErrors().get(errorKey)));
        }
    }

    // Optionally record each column operation as an UPDATE or DELETE.
    if (ConfigurationStore.getStore().shouldWriteColumns()) {
        for (ColumnOperation operation : logEntry.getOperations()) {
            if (operation.isDelete()) {
                slice.add(getMutation(operation.getName(), OperationType.DELETE));
            } else {
                slice.add(getMutation(operation.getName(), OperationType.UPDATE));
            }
        }
    }

    // Write all columns in a single batch_mutate call, keyed by the entry's uuid.
    Map<ByteBuffer, Map<String, List<Mutation>>> mutationMap =
            new HashMap<ByteBuffer, Map<String, List<Mutation>>>();
    Map<String, List<Mutation>> cfMutations = new HashMap<String, List<Mutation>>();
    cfMutations.put(COLUMN_FAMILY, slice);
    ByteBuffer rowKey = ByteBufferUtil.bytes(logEntry.getUuid());
    mutationMap.put(rowKey, cfMutations);
    getConnection(KEYSPACE).batch_mutate(mutationMap, logEntry.getConsistencyLevel());
}
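// A minimal sketch of the getMutation(...) helper relied on above; the real
// implementation lives elsewhere in the class hierarchy, so the exact signature
// and any overloads (e.g. for OperationType values) are assumptions. It wraps a
// name/value pair in a Thrift Mutation carrying a single Column.
private Mutation getMutation(String name, String value) {
    Column column = new Column();
    column.setName(ByteBufferUtil.bytes(name));
    column.setValue(ByteBufferUtil.bytes(value == null ? "" : value));
    column.setTimestamp(System.currentTimeMillis() * 1000); // microsecond timestamp
    ColumnOrSuperColumn columnOrSuperColumn = new ColumnOrSuperColumn();
    columnOrSuperColumn.setColumn(column);
    Mutation mutation = new Mutation();
    mutation.setColumn_or_supercolumn(columnOrSuperColumn);
    return mutation;
}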
public List<LogEntry> getPending() throws Throwable {
    List<LogEntry> result = new ArrayList<LogEntry>();

    // Fetch every column of each matching row.
    SlicePredicate predicate = new SlicePredicate();
    SliceRange range = new SliceRange(
            ByteBufferUtil.bytes(""), ByteBufferUtil.bytes(""), false, MAX_NUMBER_COLUMNS);
    predicate.setSlice_range(range);

    ColumnParent parent = new ColumnParent(COLUMN_FAMILY);

    // First query: COMMITTED entries that belong to this host.
    IndexClause indexClause = new IndexClause();
    indexClause.setCount(BATCH_SIZE);
    indexClause.setStart_key(new byte[0]);
    indexClause.addToExpressions(
            new IndexExpression(
                    ByteBufferUtil.bytes(LogEntryColumns.STATUS.toString()),
                    IndexOperator.EQ,
                    ByteBufferUtil.bytes(LogEntryStatus.COMMITTED.toString())));
    indexClause.addToExpressions(
            new IndexExpression(
                    ByteBufferUtil.bytes(LogEntryColumns.HOST.toString()),
                    IndexOperator.EQ,
                    ByteBufferUtil.bytes(this.getHostName())));
    List<KeySlice> rows =
            getConnection(KEYSPACE)
                    .get_indexed_slices(parent, indexClause, predicate, ConsistencyLevel.ALL);
    result.addAll(toLogEntry(rows));

    // Second query: COMMITTED entries from any host that are older than
    // TIME_BEFORE_PROCESS_OTHER_HOST seconds, so another host's stalled
    // entries are eventually picked up.
    indexClause = new IndexClause();
    indexClause.setCount(BATCH_SIZE);
    indexClause.setStart_key(new byte[0]);
    indexClause.addToExpressions(
            new IndexExpression(
                    ByteBufferUtil.bytes(LogEntryColumns.STATUS.toString()),
                    IndexOperator.EQ,
                    ByteBufferUtil.bytes(LogEntryStatus.COMMITTED.toString())));
    indexClause.addToExpressions(
            new IndexExpression(
                    ByteBufferUtil.bytes(LogEntryColumns.TIMESTAMP.toString()),
                    IndexOperator.LT,
                    ByteBufferUtil.bytes(
                            System.currentTimeMillis() - (1000L * TIME_BEFORE_PROCESS_OTHER_HOST))));
    rows =
            getConnection(KEYSPACE)
                    .get_indexed_slices(parent, indexClause, predicate, ConsistencyLevel.ALL);
    result.addAll(toLogEntry(rows));

    return result;
}
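// A hedged sketch of the toLogEntry(...) conversion used above; the actual
// mapping and the LogEntry setter names are assumptions inferred from the
// columns written by writeLogEntry(). Each KeySlice (one row) becomes one
// LogEntry keyed by its row key (the uuid).
private List<LogEntry> toLogEntry(List<KeySlice> rows) throws Exception {
    List<LogEntry> entries = new ArrayList<LogEntry>();
    for (KeySlice row : rows) {
        LogEntry entry = new LogEntry();
        entry.setUuid(ByteBufferUtil.string(row.key)); // row key is the entry's uuid
        for (ColumnOrSuperColumn cosc : row.getColumns()) {
            String name = ByteBufferUtil.string(cosc.column.name);
            String value = ByteBufferUtil.string(cosc.column.value);
            if (LogEntryColumns.KS.toString().equals(name)) {
                entry.setKeyspace(value);
            } else if (LogEntryColumns.CF.toString().equals(name)) {
                entry.setColumnFamily(value);
            } else if (LogEntryColumns.ROW.toString().equals(name)) {
                entry.setRowKey(value);
            } else if (LogEntryColumns.STATUS.toString().equals(name)) {
                entry.setStatus(LogEntryStatus.valueOf(value));
            } else if (LogEntryColumns.HOST.toString().equals(name)) {
                entry.setHost(value);
            } // TIMESTAMP and error columns would be handled the same way
        }
        entries.add(entry);
    }
    return entries;
}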
public DistributedCommitLog(String keyspace, String columnFamily) throws Exception {
    // getPending() relies on secondary indexes over STATUS, HOST, and TIMESTAMP,
    // so those column names are handed to the base class.
    super(
            keyspace,
            columnFamily,
            new String[] {
                LogEntryColumns.STATUS.toString(),
                LogEntryColumns.HOST.toString(),
                LogEntryColumns.TIMESTAMP.toString()
            });
    logger.warn("Instantiated distributed commit log.");
    this.getHostName();

    // Schedule the background task that periodically processes pending entries.
    triggerTimer = new Timer(true);
    triggerTimer.schedule(new TriggerTask(), 0, TRIGGER_FREQUENCY);
    logger.debug("Started Trigger Task thread.");
}
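// A hedged sketch of the TriggerTask scheduled above; its real processing logic
// is not shown in this listing, so the body below is an assumption: on each tick
// it drains the pending (COMMITTED) entries returned by getPending() and hands
// them off for handling.
private class TriggerTask extends TimerTask {
    @Override
    public void run() {
        try {
            for (LogEntry logEntry : getPending()) {
                logger.debug("Processing pending log entry: " + logEntry.getUuid());
                // Actual handling (e.g. firing triggers, then marking or
                // removing the entry) lives elsewhere in the class.
            }
        } catch (Throwable t) {
            logger.error("Could not process pending log entries.", t);
        }
    }
}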