public void run() {
    try {
        // Fetch all sessions for this user and close any that are still marked as started.
        ColumnList<String> sessions = baseCassandraDao.readWithKey(
                ColumnFamily.SESSIONS.getColumnFamily(), (gooruUId + SEPERATOR + SESSIONS), 0);
        for (Column<String> session : sessions) {
            if (session.getStringValue() != null
                    && !sessionId.equalsIgnoreCase(session.getName())
                    && session.getStringValue().equalsIgnoreCase(START)) {
                ColumnList<String> sessionInfo = baseCassandraDao.readWithKey(
                        ColumnFamily.SESSION_ACTIVITY.getColumnFamily(), session.getName(), 0);
                if (sessionInfo != null) {
                    logger.info("Closing session : {}", session.getName());
                    long endTime = sessionInfo.getLongValue(_END_TIME, 0L);
                    long totalTimeSpent = endTime - sessionInfo.getLongValue(_START_TIME, 0L);
                    ColumnList<String> eventDetail = baseCassandraDao.readWithKey(
                            ColumnFamily.EVENTDETAIL.getColumnFamily(),
                            sessionInfo.getStringValue(_EVENT_ID, null), 0);
                    if (eventDetail != null) {
                        // Mark the session as stopped.
                        baseCassandraDao.saveStringValue(
                                ColumnFamily.SESSIONS.getColumnFamily(),
                                (gooruUId + SEPERATOR + SESSIONS), session.getName(), STOP, 1);

                        // Rebuild the original event with updated metrics and context.
                        String eventField = eventDetail.getStringValue(FIELDS, null);
                        JSONObject eventJson = new JSONObject(eventField);
                        Event event = gson.fromJson(eventField, Event.class);
                        event.setEndTime(endTime);

                        JSONObject metrics = new JSONObject(event.getMetrics());
                        metrics.put(TOTALTIMEINMS, totalTimeSpent);
                        metrics.put(VIEWS_COUNT, 1L);

                        JSONObject context = new JSONObject(event.getContext());
                        context.put(TYPE, STOP);
                        context.put(LOGGED_BY, SYSTEM);

                        eventJson.put(METRICS, metrics.toString());
                        eventJson.put(CONTEXT, context.toString());

                        // Post the system-generated stop event to the logging API.
                        StringEntity eventEntity = new StringEntity("[" + eventJson + "]");
                        HttpPost postRequest = new HttpPost(restPoint + LOGGING_URL + apiKey);
                        postRequest.setEntity(eventEntity);
                        postRequest.setHeader(CONTENT_TYPE, CONTENT_TYPE_VALUES);
                        httpClient = new DefaultHttpClient();
                        HttpResponse response = httpClient.execute(postRequest);
                        logger.info("Status : {} ", response.getStatusLine().getStatusCode());
                        logger.info("System logged Event : {} ", eventJson);
                    }
                }
            }
        }
    } catch (Exception e) {
        logger.error("Error while closing events", e);
    }
}
@Override
public Message peekMessage(String messageId) throws MessageQueueException {
    // The message id is a composite of the shard key and the queue entry.
    String[] parts = splitCompositeKey(messageId);
    String shardKey = parts[0];
    MessageQueueEntry entry = new MessageQueueEntry(parts[1]);

    try {
        Column<MessageQueueEntry> column = keyspace
                .prepareQuery(queueColumnFamily)
                .setConsistencyLevel(consistencyLevel)
                .getKey(shardKey)
                .getColumn(entry)
                .execute()
                .getResult();
        try {
            ByteArrayInputStream bais = new ByteArrayInputStream(column.getByteArrayValue());
            return mapper.readValue(bais, Message.class);
        } catch (Exception e) {
            LOG.warn("Error parsing message", e);
            // Error parsing the message so we pass it on to the invalid message handler.
            try {
                return invalidMessageHandler.apply(column.getStringValue());
            } catch (Exception e2) {
                LOG.warn("Error handling invalid message", e2);
                throw new MessageQueueException("Error parsing message " + messageId);
            }
        }
    } catch (NotFoundException e) {
        // No column for this entry: the message does not exist (or was already consumed).
        return null;
    } catch (ConnectionException e) {
        throw new MessageQueueException("Error getting message " + messageId, e);
    }
}
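// For reference, a minimal caller-side sketch of peekMessage(). It assumes the surrounding
// class implements the Astyanax MessageQueue recipe interface; 'queue' and 'someMessageId'
// are hypothetical placeholders obtained elsewhere, not names from the code above.
import com.netflix.astyanax.recipes.queue.Message;
import com.netflix.astyanax.recipes.queue.MessageQueue;
import com.netflix.astyanax.recipes.queue.MessageQueueException;

public class PeekExample {
    static void peek(MessageQueue queue, String someMessageId) {
        try {
            Message message = queue.peekMessage(someMessageId);
            if (message == null) {
                // NotFoundException inside peekMessage is translated into a null return.
                System.out.println("Message not found: " + someMessageId);
            } else {
                System.out.println("Peeked message with key: " + message.getKey());
            }
        } catch (MessageQueueException e) {
            // Raised on connection errors or when the payload cannot be parsed or handled.
            e.printStackTrace();
        }
    }
}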
/**
 * Return history for a single key for the specified time range
 *
 * <p>TODO: honor the time range :)
 */
@Override
public List<MessageHistory> getKeyHistory(String key, Long startTime, Long endTime, int count)
        throws MessageQueueException {
    List<MessageHistory> list = Lists.newArrayList();
    ColumnList<UUID> columns;
    try {
        columns = keyspace
                .prepareQuery(historyColumnFamily)
                .setConsistencyLevel(consistencyLevel)
                .getRow(key)
                .execute()
                .getResult();
    } catch (ConnectionException e) {
        throw new MessageQueueException("Failed to load history for " + key, e);
    }

    for (Column<UUID> column : columns) {
        try {
            list.add(deserializeString(column.getStringValue(), MessageHistory.class));
        } catch (Exception e) {
            LOG.info("Error deserializing history entry", e);
        }
    }
    return list;
}
/**
 * Extract a message body from a column
 *
 * @param column
 * @return the deserialized Message, or null if the column value could not be parsed or handled
 */
Message extractMessageFromColumn(Column<MessageQueueEntry> column) {
    // Parse the serialized message body from the column value.
    Message message = null;
    try {
        ByteArrayInputStream bais = new ByteArrayInputStream(column.getByteArrayValue());
        message = mapper.readValue(bais, Message.class);
    } catch (Exception e) {
        LOG.warn("Error processing message ", e);
        // Fall back to the invalid message handler, which may repair or discard the payload.
        try {
            message = invalidMessageHandler.apply(column.getStringValue());
        } catch (Exception e2) {
            LOG.warn("Error processing invalid message", e2);
        }
    }
    return message;
}
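// A sketch of what an invalidMessageHandler could look like, assuming it is a Guava
// Function<String, Message>, consistent with the apply(String) call returning a Message
// above. This handler simply logs and drops the payload; it is illustrative only and not
// the recipe's default behaviour.
import com.google.common.base.Function;
import com.netflix.astyanax.recipes.queue.Message;

public class DroppingInvalidMessageHandler implements Function<String, Message> {
    @Override
    public Message apply(String rawColumnValue) {
        // Returning null makes extractMessageFromColumn() return null, so the caller
        // counts the entry as invalid (see incInvalidMessageCount in readMessagesInternal).
        System.err.println("Dropping unparseable message payload: " + rawColumnValue);
        return null;
    }
}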
@Override
boolean removeColumn(
        String recordKey,
        Column<CompositeColumnName> column,
        String className,
        RowMutator mutator,
        Map<String, List<Column<CompositeColumnName>>> fieldColumnMap) {
    String rowKey = getRowKey(column);
    ColumnListMutation<IndexColumnName> indexColList =
            mutator.getIndexColumnList(indexCF, rowKey);
    UUID uuid = column.getName().getTimeUUID();
    IndexColumnName indexEntry =
            new IndexColumnName(className, recordKey, column.getStringValue(), uuid);
    indexColList.deleteColumn(indexEntry);
    return true;
}
private List<MessageContext> readMessagesInternal(
        String shardName,
        int itemsToPop,
        int lockColumnCount,
        MessageQueueEntry lockColumn,
        ColumnListMutation<MessageQueueEntry> rowMutation,
        MutationBatch m,
        long curTimeMicros)
        throws BusyLockException, MessageQueueException {
    try {
        List<MessageContext> entries = Lists.newArrayList();

        RangeEndpoint re = ShardedDistributedMessageQueue.entrySerializer
                .makeEndpoint((byte) MessageQueueEntryType.Message.ordinal(), Equality.EQUAL)
                .append((byte) 0, Equality.EQUAL);
        if (lockColumn != null) {
            re.append(lockColumn.getTimestamp(), Equality.LESS_THAN_EQUALS);
        } else {
            re.append(TimeUUIDUtils.getMicrosTimeUUID(curTimeMicros), Equality.LESS_THAN_EQUALS);
        }

        ColumnList<MessageQueueEntry> result = queue.keyspace
                .prepareQuery(queue.queueColumnFamily)
                .setConsistencyLevel(queue.consistencyLevel)
                .getKey(shardName)
                .withColumnRange(new RangeBuilder()
                        .setLimit(itemsToPop + (lockColumn == null ? 0 : (lockColumnCount + 1)))
                        .setEnd(re.toBytes())
                        .build())
                .execute()
                .getResult();

        for (Column<MessageQueueEntry> column : result) {
            if (itemsToPop == 0) {
                break;
            }

            MessageQueueEntry entry = column.getName();
            switch (entry.getType()) {
                case Lock:
                    // TODO: Track number of locks read and make sure we don't exceed itemsToPop
                    // We have the lock
                    if (lockColumn != null && entry.getState() == MessageQueueEntryState.Acquired) {
                        if (!entry.getTimestamp().equals(lockColumn.getTimestamp())) {
                            throw new BusyLockException("Someone else snuck in");
                        }
                    }
                    break;

                case Message: {
                    try {
                        itemsToPop--;

                        // First, we always want to remove the old item
                        String messageId = queue.getCompositeKey(shardName, entry.getMessageId());
                        rowMutation.deleteColumn(entry);

                        // Next, parse the message metadata and add a timeout entry
                        final Message message = queue.extractMessageFromColumn(column);

                        // Update the message state
                        if (message != null) {
                            MessageContext context = new MessageContext();
                            context.setMessage(message);

                            // Message has a trigger so we need to figure out if it is an
                            // unfinished repeating trigger and re-add it.
                            if (message.hasTrigger()) {
                                // Read back all messageIds associated with this key and check to
                                // see if we have duplicates.
                                String groupRowKey = queue.getCompositeKey(queue.getName(), message.getKey());
                                try {
                                    // Use consistency level
                                    ColumnList<MessageMetadataEntry> columns = queue.keyspace
                                            .prepareQuery(queue.keyIndexColumnFamily)
                                            .getRow(groupRowKey)
                                            .withColumnRange(ShardedDistributedMessageQueue.metadataSerializer
                                                    .buildRange()
                                                    .greaterThanEquals((byte) MessageMetadataEntryType.MessageId.ordinal())
                                                    .lessThanEquals((byte) MessageMetadataEntryType.MessageId.ordinal())
                                                    .build())
                                            .execute()
                                            .getResult();

                                    MessageMetadataEntry mostRecentMessageMetadata = null;
                                    long mostRecentTriggerTime = 0;
                                    for (Column<MessageMetadataEntry> currMessageEntry : columns) {
                                        MessageQueueEntry pendingMessageEntry =
                                                MessageQueueEntry.fromMetadata(currMessageEntry.getName());
                                        if (currMessageEntry.getTtl() == 0) {
                                            long currMessageTriggerTime =
                                                    pendingMessageEntry.getTimestamp(TimeUnit.MICROSECONDS);

                                            // First message we found, so treat as the most recent
                                            if (mostRecentMessageMetadata == null) {
                                                mostRecentMessageMetadata = currMessageEntry.getName();
                                                mostRecentTriggerTime = currMessageTriggerTime;
                                            } else {
                                                // This message's trigger time is after what we thought was the most recent.
                                                // Discard the previous 'most' recent and accept this one instead
                                                if (currMessageTriggerTime > mostRecentTriggerTime) {
                                                    LOG.warn("Need to discard : " + entry.getMessageId()
                                                            + " => " + mostRecentMessageMetadata.getName());
                                                    m.withRow(queue.keyIndexColumnFamily,
                                                            queue.getCompositeKey(queue.getName(), message.getKey()))
                                                            .putEmptyColumn(mostRecentMessageMetadata, queue.metadataDeleteTTL);

                                                    mostRecentTriggerTime = currMessageTriggerTime;
                                                    mostRecentMessageMetadata = currMessageEntry.getName();
                                                } else {
                                                    LOG.warn("Need to discard : " + entry.getMessageId()
                                                            + " => " + currMessageEntry.getName());
                                                    m.withRow(queue.keyIndexColumnFamily,
                                                            queue.getCompositeKey(queue.getName(), message.getKey()))
                                                            .putEmptyColumn(currMessageEntry.getName(), queue.metadataDeleteTTL);
                                                }
                                            }
                                        }
                                    }

                                    if (mostRecentMessageMetadata != null) {
                                        if (!mostRecentMessageMetadata.getName().endsWith(entry.getMessageId())) {
                                            throw new DuplicateMessageException("Duplicate trigger for " + messageId);
                                        }
                                    }
                                } catch (NotFoundException e) {
                                } catch (ConnectionException e) {
                                    throw new MessageQueueException("Error fetching row " + groupRowKey, e);
                                }

                                // Update the trigger
                                final Message nextMessage;
                                Trigger trigger = message.getTrigger().nextTrigger();
                                if (trigger != null) {
                                    nextMessage = message.clone();
                                    nextMessage.setTrigger(trigger);
                                    context.setNextMessage(nextMessage);
                                    if (message.isAutoCommitTrigger()) {
                                        queue.fillMessageMutation(m, nextMessage);
                                    }
                                }
                            }

                            // Message has a key so we remove this item from the messages by key index.
                            // A timeout item will be added later
                            if (message.hasKey()) {
                                m.withRow(queue.keyIndexColumnFamily,
                                        queue.getCompositeKey(queue.getName(), message.getKey()))
                                        .putEmptyColumn(MessageMetadataEntry.newMessageId(messageId),
                                                queue.metadataDeleteTTL);
                                LOG.debug("Removing from key : "
                                        + queue.getCompositeKey(queue.getName(), message.getKey())
                                        + " : " + messageId);

                                if (message.isKeepHistory()) {
                                    MessageHistory history = context.getHistory();
                                    history.setToken(entry.getTimestamp());
                                    history.setStartTime(curTimeMicros);
                                    history.setTriggerTime(message.getTrigger().getTriggerTime());
                                    history.setStatus(MessageStatus.RUNNING);
                                    try {
                                        m.withRow(queue.historyColumnFamily, message.getKey())
                                                .putColumn(entry.getTimestamp(),
                                                        queue.serializeToString(history),
                                                        queue.metadata.getHistoryTtl());
                                    } catch (Exception e) {
                                        LOG.warn("Error serializing history for key '" + message.getKey() + "'", e);
                                    }
                                }
                            }

                            // Message has a timeout so we add a timeout event.
                            if (message.getTimeout() > 0) {
                                MessageQueueEntry timeoutEntry = MessageQueueEntry.newMessageEntry(
                                        (byte) 0,
                                        TimeUUIDUtils.getMicrosTimeUUID(
                                                curTimeMicros
                                                        + TimeUnit.MICROSECONDS.convert(message.getTimeout(), TimeUnit.SECONDS)
                                                        + (queue.counter.incrementAndGet() % 1000)),
                                        MessageQueueEntryState.Busy);

                                message.setToken(timeoutEntry.getTimestamp());
                                message.setRandom(timeoutEntry.getRandom());

                                m.withRow(queue.queueColumnFamily, queue.getShardKey(message))
                                        .putColumn(timeoutEntry, column.getStringValue(),
                                                queue.metadata.getRetentionTimeout());

                                MessageMetadataEntry messageIdEntry = MessageMetadataEntry.newMessageId(
                                        queue.getCompositeKey(queue.getShardKey(message), timeoutEntry.getMessageId()));

                                // Add the timeout column to the key
                                if (message.hasKey()) {
                                    m.withRow(queue.keyIndexColumnFamily,
                                            queue.getCompositeKey(queue.getName(), message.getKey()))
                                            .putEmptyColumn(messageIdEntry, queue.metadata.getRetentionTimeout());
                                }

                                context.setAckMessageId(messageIdEntry.getName());
                            } else {
                                message.setToken(null);
                            }

                            // Update some stats
                            switch (entry.getState()) {
                                case Waiting:
                                    queue.stats.incProcessCount();
                                    break;
                                case Busy:
                                    queue.stats.incReprocessCount();
                                    break;
                                default:
                                    LOG.warn("Unknown message state: " + entry.getState());
                                    // TODO:
                                    break;
                            }

                            entries.add(context);
                        } else {
                            queue.stats.incInvalidMessageCount();
                            // TODO: Add to poison queue
                        }
                    } catch (DuplicateMessageException e) {
                        // OK to ignore this error. All the proper columns will have been deleted
                        // in the batch.
                    }
                    break;
                }

                default: {
                    // TODO: Error: Unknown type
                    break;
                }
            }
        }
        return entries;
    } catch (BusyLockException e) {
        queue.stats.incLockContentionCount();
        throw e;
    } catch (Exception e) {
        throw new MessageQueueException("Error processing queue shard : " + shardName, e);
    } finally {
        try {
            m.execute();
        } catch (Exception e) {
            throw new MessageQueueException("Error processing queue shard : " + shardName, e);
        }
    }
}
public static void main(String args[]) throws ConnectionException {
    String[] calles_28001 = {"Alcala", "Preciados", "Gran Via", "Princesa"};
    String[] calles_28002 = {"Castellana", "Goya", "Serrano", "Velazquez"};
    int index_28001 = 0;
    int index_28002 = 0;

    // Build ten sample users, alternating between the two postal codes.
    List<User> users = new ArrayList<User>();
    for (int i = 0; i < 10; i++) {
        String id = (i + 1) + "";
        String email = "user" + id + "@void.com";
        String nombre = "nombre_" + id;
        String cp;
        String calle;
        if (i % 2 == 0) {
            cp = "28001";
            calle = calles_28001[index_28001];
            index_28001++;
            index_28001 = index_28001 % 4;
        } else {
            cp = "28002";
            calle = calles_28002[index_28002];
            index_28002++;
            index_28002 = index_28002 % 4;
        }
        User user = new User(id, email, nombre, cp, calle);
        users.add(user);
    }

    // Connect and (re)create the column family.
    Keyspace ksUsers = Utils.getKeyspace("utad");
    String columnFamily = "compositeKeys";
    ColumnFamily<String, String> cfUsers = new ColumnFamily<String, String>(
            columnFamily, StringSerializer.get(), StringSerializer.get());

    try {
        ksUsers.dropColumnFamily(columnFamily);
    } catch (Exception e) {
        System.out.println("Column family to drop does not exist: " + columnFamily);
    }

    try {
        ksUsers.createColumnFamily(
                cfUsers,
                ImmutableMap.<String, Object>builder()
                        .put("key_validation_class", "BytesType")
                        .put("comparator_type", "BytesType")
                        .build());
    } catch (Exception e) {
        System.out.println("Error creating column family: " + columnFamily + " " + e.getMessage());
    }

    MutationBatch m = ksUsers.prepareMutationBatch();
    String rowKey = "usersByCPAddress";
    ColumnListMutation<String> clm = m.withRow(cfUsers, rowKey);

    System.out.println("\nWriting the data");
    for (User user : users) {
        String id = user.id;
        String cp = user.cp;
        String nombre = user.nombre;
        String email = user.email;
        String calle = user.calle;

        // Column name and value are colon-separated composites.
        String key = id + ":" + cp + ":" + calle;
        String value = id + ":" + nombre + ":" + email;
        clm.putColumn(key, value);
    }
    // Flush all columns for the row in a single mutation batch.
    m.execute();

    // Read back the result.
    System.out.println("\nReading the result");
    RowQuery<String, String> query = ksUsers
            .prepareQuery(cfUsers)
            .getKey(rowKey)
            .withColumnRange(new RangeBuilder().build())
            .autoPaginate(true);
    ColumnList<String> columns = query.execute().getResult();
    for (Column<String> c : columns) {
        String key = c.getName();
        String value = c.getStringValue();

        System.out.println("\nkey");
        String[] ksplit = key.split(":");
        for (String string : ksplit) {
            System.out.println("\t" + string);
        }

        System.out.println("value");
        String[] kvalue = value.split(":");
        for (String string : kvalue) {
            System.out.println("\t" + string);
        }
    }
}
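// A possible follow-up read: fetching a single user's column directly by its composite string
// name, reusing the getKey(...).getColumn(...) pattern seen in peekMessage above. This is a
// minimal sketch assuming the keyspace and column family created in main(); the row key and
// the column name "1:28001:Alcala" correspond to the first sample user written there.
static void readSingleUser(Keyspace ksUsers, ColumnFamily<String, String> cfUsers)
        throws ConnectionException {
    Column<String> single = ksUsers
            .prepareQuery(cfUsers)
            .getKey("usersByCPAddress")
            .getColumn("1:28001:Alcala")
            .execute()
            .getResult();
    System.out.println(single.getName() + " -> " + single.getStringValue());
}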