/**
 * Gets up to {@code count} {@code <String, String>} columns from a given row of a Cassandra
 * column family.
 *
 * @param rowName row key to query
 * @param columnFamilyName name of the column family to read from
 * @param keyspace Cassandra keyspace (must not be null)
 * @param count maximum number of columns to return
 * @return slice containing the matching columns
 * @throws CassandraDataAccessException if an argument is missing or the read fails
 */
public static ColumnSlice<String, String> getStringTypeColumnsInARow(
    String rowName, String columnFamilyName, Keyspace keyspace, int count)
    throws CassandraDataAccessException {
  if (keyspace == null) {
    throw new CassandraDataAccessException("Can't access Data , no keyspace provided ");
  }
  if (columnFamilyName == null || rowName == null) {
    throw new CassandraDataAccessException(
        "Can't access data with columnFamily = " + columnFamilyName + " and rowName=" + rowName);
  }
  try {
    // Parameterized query instead of the raw SliceQuery type (avoids unchecked warnings).
    SliceQuery<String, String, String> sliceQuery =
        HFactory.createSliceQuery(keyspace, stringSerializer, stringSerializer, stringSerializer);
    sliceQuery.setKey(rowName);
    sliceQuery.setColumnFamily(columnFamilyName);
    // Empty start/end names mean "entire row", capped at count columns.
    sliceQuery.setRange("", "", false, count);
    QueryResult<ColumnSlice<String, String>> result = sliceQuery.execute();
    return result.get();
  } catch (Exception e) {
    throw new CassandraDataAccessException(
        "Error while getting data from : " + columnFamilyName, e);
  }
}
/**
 * Reads the tag columns of the given row and returns their values, sorted and de-duplicated.
 *
 * <p>NOTE(review): the {@code startRange}/{@code endRange} parameters are never used — the slice
 * always spans [TAG_COLUMN_MIN_NAME, TAG_COLUMN_MAX_NAME]. Confirm this is intentional.
 */
private List<String> getTags(
    Keyspace keyspaceOperator,
    String columnfamily,
    String cfId,
    String startRange,
    String endRange) {
  SliceQuery<String, String, String> sliceQuery =
      HFactory.createSliceQuery(
          keyspaceOperator, StringSerializer.get(), StringSerializer.get(), StringSerializer.get());
  sliceQuery.setColumnFamily(columnfamily);
  sliceQuery.setKey(cfId);
  sliceQuery.setRange(TAG_COLUMN_MIN_NAME, TAG_COLUMN_MAX_NAME, false, Integer.MAX_VALUE);
  ColumnSlice<String, String> slice = sliceQuery.execute().get();
  // TreeSet gives sorted order and removes duplicate tag values.
  SortedSet<String> sortedTags = new TreeSet<String>();
  for (HColumn<String, String> column : slice.getColumns()) {
    sortedTags.add(column.getValue());
  }
  return new ArrayList<String>(sortedTags);
}
/**
 * Fetches a slice of messages stored in the given queue row.
 *
 * @param queueName row key of the queue
 * @param columnFamilyName column family holding the messages
 * @param keyspace Cassandra keyspace (must not be null)
 * @param count maximum number of messages to return
 * @return slice of columns keyed by message id (long) with raw payload bytes
 * @throws CassandraDataAccessException if an argument is missing or the read fails
 */
public static ColumnSlice<Long, byte[]> getMessagesFromQueue(
    String queueName, String columnFamilyName, Keyspace keyspace, int count)
    throws CassandraDataAccessException {
  if (keyspace == null) {
    throw new CassandraDataAccessException("Can't access Data , no keyspace provided ");
  }
  if (columnFamilyName == null || queueName == null) {
    throw new CassandraDataAccessException(
        "Can't access data with columnFamily = "
            + columnFamilyName
            + " and queueName="
            + queueName);
  }
  try {
    SliceQuery<String, Long, byte[]> query =
        HFactory.createSliceQuery(keyspace, stringSerializer, longSerializer, bytesArraySerializer);
    query.setColumnFamily(columnFamilyName);
    query.setKey(queueName);
    // Scan the full column-name range [0, Long.MAX_VALUE], limited to count entries.
    query.setRange(0L, Long.MAX_VALUE, false, count);
    return query.execute().get();
  } catch (Exception e) {
    throw new CassandraDataAccessException("Error while getting data from " + columnFamilyName, e);
  }
}
/**
 * Lists the column names found in a given row of a Cassandra column family. Assumes the
 * columns in the row hold string data (only the names are collected). Reads at most 10000
 * columns.
 *
 * @param columnFamilyName name of the column family
 * @param rowName row key to read
 * @param keyspace Cassandra keyspace (must not be null)
 * @return list of column names in the row
 * @throws CassandraDataAccessException if an argument is missing or the read fails
 */
public static List<String> getRowList(String columnFamilyName, String rowName, Keyspace keyspace)
    throws CassandraDataAccessException {
  ArrayList<String> rowList = new ArrayList<String>();
  if (keyspace == null) {
    throw new CassandraDataAccessException("Can't access Data , no keyspace provided ");
  }
  if (columnFamilyName == null || rowName == null) {
    throw new CassandraDataAccessException(
        "Can't access data with columnFamily =" + columnFamilyName + " and rowName=" + rowName);
  }
  try {
    SliceQuery<String, String, String> query =
        HFactory.createSliceQuery(keyspace, stringSerializer, stringSerializer, stringSerializer);
    query.setColumnFamily(columnFamilyName);
    query.setKey(rowName);
    // Empty bounds mean "all columns", capped at 10000 entries.
    query.setRange("", "", false, 10000);
    for (HColumn<String, String> column : query.execute().get().getColumns()) {
      rowList.add(column.getName());
    }
  } catch (Exception e) {
    throw new CassandraDataAccessException(
        "Error while accessing data from :" + columnFamilyName, e);
  }
  return rowList;
}
/**
 * Walks the index row for the given execution plan in pages of 1000 columns and returns the
 * name of the last (greatest) column, i.e. the most recent revision id.
 *
 * @param executionPlanIdentifier row key of the index column family
 * @return the last revision name, or null when the row has no columns at all
 */
@Override
public String getLastRevision(String executionPlanIdentifier) {
  ColumnSlice<String, byte[]> cs;
  // Revisions appear to be named "<timestamp>_..."; start scanning from the epoch prefix.
  // NOTE(review): comparison is lexicographic on strings — assumes revision names sort
  // chronologically; confirm the naming scheme guarantees this.
  String rangeStart =
      new StringBuffer(String.valueOf(timeAt1970.getTime())).append("_").toString();
  boolean firstLoop = true;
  while (true) {
    SliceQuery<String, String, byte[]> q = HFactory.createSliceQuery(keyspace, sser, sser, bser);
    q.setColumnFamily(INDEX_COLUMN_FAMILY_NAME)
        .setKey(executionPlanIdentifier)
        .setRange(rangeStart, String.valueOf(Long.MAX_VALUE), false, 1000);
    QueryResult<ColumnSlice<String, byte[]>> r = q.execute();
    cs = r.get();
    int size = cs.getColumns().size();
    if (firstLoop && size == 0) {
      // Empty on the very first page: the row has no revisions at all.
      return null;
    } else if (size == 0) {
      // A later page came back empty: the previous page's last column is the answer.
      return rangeStart;
    } else {
      firstLoop = false;
    }
    // Continue paging from the last column seen on this page.
    int lastIndex = size - 1;
    rangeStart = cs.getColumns().get(lastIndex).getName();
    if (size < 1000) {
      // Short page: this was the final page, rangeStart now holds the last revision.
      break;
    }
  }
  log.info("found revision " + rangeStart);
  return rangeStart;
}
@Override public PersistenceObject load( PersistenceManagementEvent persistenceManagementEvent, String nodeId) { List<NodeSnapshot> list = new ArrayList<NodeSnapshot>(); ColumnSlice<String, byte[]> cs; SliceQuery<String, String, byte[]> q = HFactory.createSliceQuery(keyspace, sser, sser, bser); q.setColumnFamily(COLUMN_FAMILY_NAME) .setKey(persistenceManagementEvent.getRevision()) .setRange("", "", false, 1000) .setColumnNames(nodeId); QueryResult<ColumnSlice<String, byte[]>> r = q.execute(); cs = r.get(); PersistenceObject persistenceObject = null; for (HColumn<String, byte[]> hc : cs.getColumns()) { persistenceObject = (PersistenceObject) ByteSerializer.BToO(hc.getValue()); // list.add(new NodeSnapshot(hc.getName(), hc.getValue())); } // return list; return persistenceObject; }
/**
 * Loads and deserializes the queue-properties row identified by {@code queueId}.
 *
 * @param queuePath path of the queue (not used for the lookup itself)
 * @param queueId row key of the queue properties
 * @return the deserialized Queue
 */
public Queue getQueue(String queuePath, UUID queueId) {
  SliceQuery<UUID, String, ByteBuffer> query =
      createSliceQuery(cass.getApplicationKeyspace(applicationId), ue, se, be);
  query.setColumnFamily(QUEUE_PROPERTIES.getColumnFamily());
  query.setKey(queueId);
  // null/null bounds select every column, up to ALL_COUNT.
  query.setRange(null, null, false, ALL_COUNT);
  List<HColumn<String, ByteBuffer>> columns = query.execute().get().getColumns();
  return deserializeQueue(columns);
}
/**
 * Loads and deserializes the message-properties row identified by {@code messageId}.
 *
 * @param messageId row key of the message properties
 * @return the deserialized Message
 */
@Override
public Message getMessage(UUID messageId) {
  SliceQuery<UUID, String, ByteBuffer> query =
      createSliceQuery(cass.getApplicationKeyspace(applicationId), ue, se, be);
  query.setColumnFamily(MESSAGE_PROPERTIES.getColumnFamily());
  query.setKey(messageId);
  // null/null bounds select every column, up to ALL_COUNT.
  query.setRange(null, null, false, ALL_COUNT);
  List<HColumn<String, ByteBuffer>> columns = query.execute().get().getColumns();
  return deserializeMessage(columns);
}
/**
 * Returns the names of all counters registered in the counters dictionary of a queue.
 *
 * @param queuePath path of the queue whose counters are listed
 * @return set of counter names (column names of the dictionary row)
 * @throws Exception if the underlying read fails
 */
@Override
public Set<String> getQueueCounterNames(String queuePath) throws Exception {
  Keyspace ko = cass.getApplicationKeyspace(applicationId);
  SliceQuery<String, String, ByteBuffer> query = createSliceQuery(ko, se, se, be);
  query.setColumnFamily(QueuesCF.QUEUE_DICTIONARIES.toString());
  // Row key combines the queue id with the counters-dictionary name.
  query.setKey(
      CassandraPersistenceUtils.key(getQueueId(queuePath), DICTIONARY_COUNTERS).toString());
  query.setRange(null, null, false, ALL_COUNT);
  Set<String> counterNames = new HashSet<String>();
  for (HColumn<String, ByteBuffer> column : query.execute().get().getColumns()) {
    counterNames.add(column.getName());
  }
  return counterNames;
}
/**
 * Runs a multiget slice over all requested row keys, writing the resulting columns out via
 * writeColumns. Rows that returned a full first page (and thus may have more data) are then
 * paged individually until a short page is seen.
 *
 * @throws IOException if writing the columns fails
 */
public void runQuery() throws IOException {
  // Phase 1: one multiget covering every row key, limited to m_multiRowReadSize per row.
  MultigetSliceQuery<DataPointsRowKey, Integer, ByteBuffer> msliceQuery =
      HFactory.createMultigetSliceQuery(
          m_keyspace, ROW_KEY_SERIALIZER, IntegerSerializer.get(), ByteBufferSerializer.get());
  msliceQuery.setColumnFamily(m_columnFamily);
  msliceQuery.setKeys(m_rowKeys);
  // Descending reads swap start/end and reverse the slice direction.
  if (m_descending) msliceQuery.setRange(m_endTime, m_startTime, true, m_multiRowReadSize);
  else msliceQuery.setRange(m_startTime, m_endTime, false, m_multiRowReadSize);
  Rows<DataPointsRowKey, Integer, ByteBuffer> rows = msliceQuery.execute().get();
  List<Row<DataPointsRowKey, Integer, ByteBuffer>> unfinishedRows =
      new ArrayList<Row<DataPointsRowKey, Integer, ByteBuffer>>();
  for (Row<DataPointsRowKey, Integer, ByteBuffer> row : rows) {
    List<HColumn<Integer, ByteBuffer>> columns = row.getColumnSlice().getColumns();
    // A completely full page suggests the row was truncated — revisit it in phase 2
    // (unless a hard limit was requested).
    if (!m_limit && columns.size() == m_multiRowReadSize) unfinishedRows.add(row);
    writeColumns(row.getKey(), columns);
  }
  // Iterate through the unfinished rows and get the rest of the data.
  // todo: use multiple threads to retrieve this data
  for (Row<DataPointsRowKey, Integer, ByteBuffer> unfinishedRow : unfinishedRows) {
    DataPointsRowKey key = unfinishedRow.getKey();
    SliceQuery<DataPointsRowKey, Integer, ByteBuffer> sliceQuery =
        HFactory.createSliceQuery(
            m_keyspace, ROW_KEY_SERIALIZER, IntegerSerializer.get(), ByteBufferSerializer.get());
    sliceQuery.setColumnFamily(m_columnFamily);
    sliceQuery.setKey(key);
    List<HColumn<Integer, ByteBuffer>> columns = unfinishedRow.getColumnSlice().getColumns();
    do {
      // Resume one step past the last column already seen (minus one when descending)
      // to avoid re-reading it.
      Integer lastTime = columns.get(columns.size() - 1).getName();
      if (m_descending) sliceQuery.setRange(lastTime - 1, m_startTime, true, m_singleRowReadSize);
      else sliceQuery.setRange(lastTime + 1, m_endTime, false, m_singleRowReadSize);
      columns = sliceQuery.execute().get().getColumns();
      writeColumns(key, columns);
      // A full page means there may be yet more data; a short page terminates the row.
    } while (columns.size() == m_singleRowReadSize);
  }
}
/**
 * Loads a single place-log record by its UUID string.
 *
 * @param id UUID string identifying the record; null yields null
 * @return the populated PlaceLogRecord (fields missing in Cassandra come back via getString)
 */
public PlaceLogRecord findPlaceLogRecord(String id) {
  if (id == null) return null;
  SliceQuery<UUID, String, String> q =
      HFactory.createSliceQuery(KeyspaceFactory.get(), us, ss, ss);
  q.setColumnFamily(PLACE_LOG_RECORD)
      .setKey(UUID.fromString(id))
      .setColumnNames("url", "app_version", "user_agent", "log_level", "username");
  // Hoist the slice into a local instead of calling r.get() once per column.
  ColumnSlice<String, String> slice = q.execute().get();
  PlaceLogRecord record = new PlaceLogRecord();
  record.setId(id);
  record.setVersion(0);
  record.setAppVersion(getString(slice.getColumnByName("app_version")));
  record.setLogLevel(getString(slice.getColumnByName("log_level")));
  record.setUrl(getString(slice.getColumnByName("url")));
  record.setUserAgent(getString(slice.getColumnByName("user_agent")));
  record.setUsername(getString(slice.getColumnByName("username")));
  return record;
}
@Override public Iterator<HColumn<String, String>> queryBrigesTimeStats( Node[] query, String keyspace, String start_time, String end_time) throws StoreException { // SPO, OSP cfs have one node as key and two nodes as colname Node[] nxKey = new Node[] {query[0]}; String key = query[0].toN3(); int colNameTupleLength = 1; SliceQuery<String, String, String> sq = HFactory.createSliceQuery(getExistingKeyspace(keyspace), _ss, _ss, _ss) .setColumnFamily(CF_S_PO) .setRange(start_time, end_time, false, Integer.MAX_VALUE) .setKey(key); QueryResult<ColumnSlice<String, String>> results = sq.execute(); List<HColumn<String, String>> columns = results.get().getColumns(); Iterator<HColumn<String, String>> it_columns = columns.iterator(); return it_columns; }
/**
 * Fetches up to {@code count} log records for a user from the timeline row, walking backwards
 * (newest first) starting at {@code startKey}.
 *
 * @param username timeline row key; null/empty yields null
 * @param startKey timestamp column to start from (reversed scan)
 * @param count maximum number of records to return
 * @return list of records with their timeline timestamps set, or null for a blank username
 */
@PreAuthorize("hasRole('ROLE_ADMINISTRATOR')")
public List<PlaceLogRecord> findPlaceLogRecordsByUsername(
    String username, Long startKey, Integer count) {
  if (username == null || username.isEmpty()) {
    return null;
  }
  SliceQuery<String, Long, UUID> timelineQuery =
      HFactory.createSliceQuery(KeyspaceFactory.get(), ss, ls, us);
  timelineQuery.setColumnFamily(PLACE_LOG_RECORD_TIMELINE);
  timelineQuery.setKey(username);
  // Reversed range: newest entries first, starting at startKey.
  timelineQuery.setRange(startKey, null, true, count);
  List<PlaceLogRecord> records = new ArrayList<PlaceLogRecord>();
  for (HColumn<Long, UUID> entry : timelineQuery.execute().get().getColumns()) {
    // Column value is the record UUID; column name is its timestamp.
    PlaceLogRecord record = findPlaceLogRecord(entry.getValue().toString());
    record.setTimestamp(entry.getName());
    records.add(record);
  }
  return records;
}
/**
 * Answers a triple pattern against the store, choosing the column family by which positions
 * of the pattern are constant and dispatching to the appropriate lookup strategy.
 *
 * @param query the triple pattern (constants and variables)
 * @param limit maximum number of results
 * @param keyspace name of the keyspace to query
 * @return iterator over matching triples
 * @throws StoreException propagated from the superclass lookup
 */
@Override
public Iterator<Node[]> query(Node[] query, int limit, String keyspace) throws StoreException {
  // Give the superclass a chance to answer the pattern first.
  Iterator<Node[]> it = super.query(query, limit, keyspace);
  if (it != null) {
    return it;
  }
  String columnFamily = selectColumnFamily(query);
  int[] map = _maps.get(columnFamily);
  // Reorder the pattern into the key/column order of the chosen column family.
  Node[] q = Util.reorder(query, map);
  // _log.info("query: " + Nodes.toN3(query) + " idx: " + columnFamily + " reordered: " +
  // Nodes.toN3(q));
  if (isVariable(q[0])) {
    // scan over all
    throw new UnsupportedOperationException("triple patterns must have at least one constant");
  } else {
    if (columnFamily.equals(CF_PO_S)) {
      if (isVariable(q[1])) {
        // we use a secondary index for P only when no other constant is given
        IndexedSlicesQuery<byte[], String, String> isq =
            HFactory.createIndexedSlicesQuery(getExistingKeyspace(keyspace), _bs, _ss, _ss)
                .setColumnFamily(columnFamily)
                .addEqualsExpression("!p", q[0].toN3())
                .setReturnKeysOnly();
        it =
            new HashIndexedSlicesQueryIterator(
                isq, map, limit, columnFamily, getExistingKeyspace(keyspace));
      } else {
        // here we always have a PO lookup, POS (=SPO) is handled by OSP or SPO
        // we retrieve all columns from a single row
        // in POS the keys are hashes, we retrieve P and O from columns !p and !o
        ByteBuffer key = createKey(new Node[] {q[0], q[1]});
        SliceQuery<byte[], String, String> sq =
            HFactory.createSliceQuery(getExistingKeyspace(keyspace), _bs, _ss, _ss)
                .setColumnFamily(columnFamily)
                .setKey(key.array())
                .setRange("!o", "!p", false, 2);
        QueryResult<ColumnSlice<String, String>> res = sq.execute();
        // No !o/!p columns means the PO pair does not exist: empty result.
        if (res.get().getColumns().size() == 0) return new ArrayList<Node[]>().iterator();
        Node[] nxKey = new Node[2];
        try {
          nxKey[0] = NxParser.parseNode(res.get().getColumnByName("!p").getValue());
          nxKey[1] = NxParser.parseNode(res.get().getColumnByName("!o").getValue());
        } catch (ParseException e) {
          // NOTE(review): parse failure leaves nxKey entries null and only prints the
          // trace — confirm this is acceptable rather than surfacing a StoreException.
          e.printStackTrace();
        }
        it = new ColumnSliceIterator<byte[]>(sq, nxKey, "<", "", map, limit, 1);
      }
    } else {
      String startRange = "", endRange = "";
      if (!isVariable(q[1])) {
        // if there is more than one constant, we need to modify the range
        startRange = q[1].toN3();
        // "_" suffix bounds the range to column names starting with q[1]'s N3 form.
        endRange = startRange + "_";
        if (!isVariable(q[2])) {
          // Fully-constant colname: exact match, start == end.
          startRange = Nodes.toN3(new Node[] {q[1], q[2]});
          endRange = startRange;
        }
      }
      // SPO, OSP cfs have one node as key and two nodes as colname
      Node[] nxKey = new Node[] {q[0]};
      String key = q[0].toN3();
      int colNameTupleLength = 2;
      SliceQuery<String, String, String> sq =
          HFactory.createSliceQuery(getExistingKeyspace(keyspace), _ss, _ss, _ss)
              .setColumnFamily(columnFamily)
              .setKey(key);
      it =
          new ColumnSliceIterator<String>(
              sq, nxKey, startRange, endRange, map, limit, colNameTupleLength);
    }
  }
  return it;
}
/**
 * Get a list of UUIDs that can be read for the client. This comes directly from the queue
 * inbox, and DOES NOT take into account client messages. Walks the queue's time-sharded rows
 * from the start bound to the finish bound, collecting up to {@code params.limit} column
 * names (message UUIDs).
 *
 * @param queueId The queue id to read
 * @param bounds The bounds to use when reading; must not be null
 * @param params search window: start id, direction, limit, skip-first flag
 * @return the collected message UUIDs (possibly empty, never null)
 */
protected List<UUID> getQueueRange(UUID queueId, QueueBounds bounds, SearchParam params) {
  if (bounds == null) {
    logger.error("Necessary queue bounds not found");
    throw new QueueException("Neccessary queue bounds not found");
  }
  // Direction decides which bound terminates the scan and which one starts it.
  UUID finish_uuid = params.reversed ? bounds.getOldest() : bounds.getNewest();
  List<UUID> results = new ArrayList<UUID>(params.limit);
  UUID start = params.startId;
  if (start == null) {
    start = params.reversed ? bounds.getNewest() : bounds.getOldest();
  }
  if (start == null) {
    logger.error("No first message in queue");
    return results;
  }
  if (finish_uuid == null) {
    logger.error("No last message in queue");
    return results;
  }
  // Rows are sharded by time; round both bounds down to their shard start.
  long start_ts_shard = roundLong(getTimestampInMillis(start), QUEUE_SHARD_INTERVAL);
  long finish_ts_shard = roundLong(getTimestampInMillis(finish_uuid), QUEUE_SHARD_INTERVAL);
  long current_ts_shard = start_ts_shard;
  if (params.reversed) {
    current_ts_shard = finish_ts_shard;
  }
  while ((current_ts_shard >= start_ts_shard) && (current_ts_shard <= finish_ts_shard)) {
    // Interior shards are read in full; the boundary shards are clipped to the exact
    // start/finish UUIDs.
    UUID slice_start = MIN_TIME_UUID;
    UUID slice_end = MAX_TIME_UUID;
    if (current_ts_shard == start_ts_shard) {
      slice_start = start;
    }
    if (current_ts_shard == finish_ts_shard) {
      slice_end = finish_uuid;
    }
    SliceQuery<ByteBuffer, UUID, ByteBuffer> q = createSliceQuery(ko, be, ue, be);
    q.setColumnFamily(QUEUE_INBOX.getColumnFamily());
    q.setKey(getQueueShardRowKey(queueId, current_ts_shard));
    // limit + 1 leaves room for the start column that may be skipped below.
    q.setRange(slice_start, slice_end, params.reversed, params.limit + 1);
    List<HColumn<UUID, ByteBuffer>> cassResults = q.execute().get().getColumns();
    for (int i = 0; i < cassResults.size(); i++) {
      HColumn<UUID, ByteBuffer> column = cassResults.get(i);
      // skip the first one, we've already read it
      if (i == 0 && params.skipFirst && params.startId.equals(column.getName())) {
        continue;
      }
      UUID id = column.getName();
      results.add(id);
      logger.debug("Added id '{}' to result set for queue id '{}'", id, queueId);
      if (results.size() >= params.limit) {
        return results;
      }
    }
    // Advance to the next shard in the scan direction.
    if (params.reversed) {
      current_ts_shard -= QUEUE_SHARD_INTERVAL;
    } else {
      current_ts_shard += QUEUE_SHARD_INTERVAL;
    }
  }
  return results;
}