/**
 * Finds the local user ids connected to any of the given provider-side user ids
 * for the given provider.
 *
 * @param providerId      the social provider id (e.g. "twitter")
 * @param providerUserIds the provider-side user ids to match against
 * @return the matching local user ids, or {@code null} when no rows matched or
 *         the query failed (preserves the original method's contract)
 */
public Set<String> findUserIdsConnectedTo(String providerId, Set<String> providerUserIds) {
    try {
        Statement getUserIds = QueryBuilder.select()
                .column("userid")
                .from(keyspace, table)
                .where(QueryBuilder.eq("providerId", providerId))
                .and(QueryBuilder.in("providerUserId", providerUserIds));
        ResultSet rs = session.execute(getUserIds);
        // BUG FIX: the original called rs.all() twice. The first call drains the
        // result set, so the follow-up iteration over rs.all() always saw an empty
        // list and no user id was ever collected. Fetch the rows exactly once
        // (same pattern as findUserIdsWithConnection).
        List<Row> rows = rs.all();
        Set<String> localUserIds = null;
        if (!rows.isEmpty()) {
            localUserIds = new HashSet<String>();
            for (Row row : rows) {
                // Driver column lookup is case-insensitive, so "userId" matches
                // the selected "userid" column.
                localUserIds.add(row.getString("userId"));
            }
        }
        return localUserIds;
    } catch (Exception e) {
        // TODO: replace with proper logging and/or exception propagation;
        // kept as-is to preserve the swallow-and-return-null contract.
        e.printStackTrace();
    }
    return null;
}
/**
 * Finds the local user ids holding the given connection (provider id +
 * provider user id pair).
 *
 * @param connection the connection whose key identifies the provider-side user
 * @return the matching local user ids, or {@code null} when no rows matched or
 *         the query failed
 */
public List<String> findUserIdsWithConnection(Connection<?> connection) {
    ConnectionKey key = connection.getKey();
    try {
        Statement query = QueryBuilder.select()
                .column("userid")
                .from(keyspace, table)
                .allowFiltering()
                .where(QueryBuilder.eq("providerId", key.getProviderId()))
                .and(QueryBuilder.eq("providerUserId", key.getProviderUserId()));
        List<Row> rows = session.execute(query).all();
        if (rows.isEmpty()) {
            // Preserve the original contract: null, not an empty list.
            return null;
        }
        List<String> userIds = new LinkedList<String>();
        for (Row row : rows) {
            userIds.add(row.getString("userId"));
        }
        return userIds;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
/**
 * Materializes every row of the result set into an instance of the given bean
 * class, using the row's column definitions to drive the mapping.
 *
 * @param bean      the target class to instantiate per row
 * @param resultSet the rows to convert
 * @return one mapped bean per row, in result-set order
 */
@SuppressWarnings("unchecked")
public <T> List<T> recoverObjet(Class<T> bean, ResultSet resultSet) {
    List<T> results = new LinkedList<T>();
    for (Row row : resultSet.all()) {
        Map<String, Definition> definitions = createMapDefinition(row.getColumnDefinitions());
        results.add((T) createObject(bean, row, definitions));
    }
    return results;
}
@Test
public void testCassandraBatchInsertAndSelectStatement() throws Exception {
    // Batch-insert five books through the insert handler.
    this.cassandraMessageHandler2.handleMessage(new GenericMessage<>(getBookList(5)));

    // Issue a parameterized select limited to 2 rows.
    Message<?> selectMessage =
            MessageBuilder.withPayload("Cassandra Guru").setHeader("limit", 2).build();
    this.cassandraMessageHandler4.handleMessage(selectMessage);

    // The select result arrives on the result channel as a ResultSet payload.
    Message<?> reply = this.resultChannel.receive(10000);
    assertNotNull(reply);
    assertThat(reply.getPayload(), instanceOf(ResultSet.class));

    ResultSet resultSet = (ResultSet) reply.getPayload();
    assertNotNull(resultSet);
    assertEquals(2, resultSet.all().size());

    // Clean up the table so other tests start from an empty state.
    this.cassandraMessageHandler1.handleMessage(
            new GenericMessage<>(QueryBuilder.truncate("book")));
}
/**
 * Migrates one schedule's {@code bucket} aggregate data: walks the raw rows,
 * merges the per-type values (max/min/avg) that share the same timestamp into
 * a single aggregate, and asynchronously writes each completed aggregate via
 * {@link #writeMetrics}. Returns the number of metrics migrated so far, or
 * throws if any asynchronous write has failed.
 *
 * Row layout (by index): 1 = timestamp, 2 = value type (0 = max, 1 = min,
 * other = avg), 3 = value, 4 = ttl, 5 = write time.
 */
@Override public Integer call() throws Exception {
    List<Row> rows = resultSet.all();
    // Nothing to do for this schedule.
    if (rows.isEmpty()) { log.debug("No " + bucket + " data to migrate for schedule id " + scheduleId); return 0; }
    // Seed the accumulator state from the first row's time group.
    Date time = rows.get(0).getDate(1);
    Date nextTime;
    Double max = null;
    Double min = null;
    Double avg = null;
    Long writeTime = rows.get(0).getLong(5);
    Integer ttl = rows.get(0).getInt(4);
    for (Row row : rows) {
        // Abort early if the write callback (this task is its own callback)
        // has recorded a failure.
        if (writeFailed) { throw new Exception( "Migration of " + bucket + " data for schedule id " + scheduleId + " failed"); }
        nextTime = row.getDate(1);
        if (nextTime.equals(time)) {
            // Same time group: slot the value into max/min/avg by type code.
            int type = row.getInt(2);
            switch (type) { case 0: max = row.getDouble(3); break; case 1: min = row.getDouble(3); break; default: avg = row.getDouble(3); }
        } else {
            // Time group changed: flush the completed aggregate, unless it is
            // only partially populated (then it is skipped, not migrated).
            if (isDataMissing(avg, max, min)) { log.debug( "We only have a partial " + bucket + " metric for {scheduleId: " + scheduleId + ", time: " + time.getTime() + "}. It will not be migrated."); } else { ResultSetFuture writeFuture = writeMetrics(time, avg, max, min, ttl, writeTime); Futures.addCallback(writeFuture, this); }
            // Start accumulating the new time group.
            // NOTE(review): the first row of the new group is assumed to be the
            // max (type 0) — presumably rows are clustered by (time, type) so
            // type 0 always leads; verify against the query's ordering.
            time = nextTime; max = row.getDouble(3); min = null; avg = null; ttl = row.getInt(4); writeTime = row.getLong(5);
        }
    }
    // NOTE(review): the final time group accumulated above is never flushed —
    // writeMetrics is only invoked on a group transition. Confirm whether the
    // last aggregate is intentionally dropped or handled elsewhere.
    if (writeFailed) { throw new Exception( "Migration of " + bucket + " data for schedule id " + scheduleId + " failed"); }
    // Running count maintained by the write callbacks.
    return metricsMigrated.get();
}