  @Test
  public void testGetSliceFromLarge() throws Throwable {
    // tests slicing against 1000 columns in an sstable
    Table table = Table.open("Keyspace1");
    ColumnFamilyStore cfStore = table.getColumnFamilyStore("Standard1");
    String key = "row3";
    RowMutation rm = new RowMutation("Keyspace1", key);
    ColumnFamily cf = ColumnFamily.create("Keyspace1", "Standard1");
    for (int i = 1000; i < 2000; i++) cf.addColumn(column("col" + i, ("v" + i), 1L));
    rm.add(cf);
    rm.apply();
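    // flush the memtable so the wide row is persisted as an sstable on disk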
    cfStore.forceBlockingFlush();

    validateSliceLarge(cfStore);
    // compact so we have a big row with more than the minimum index count
    if (cfStore.getSSTables().size() > 1) {
      CompactionManager.instance.submitMajor(cfStore).get();
    }
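    // read the row back directly from the sstable data file and verify that its column
    // index was written with more than the minimum number of entries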
    SSTableReader sstable = cfStore.getSSTables().iterator().next();
    DecoratedKey decKey = sstable.getPartitioner().decorateKey(key);
    SSTable.PositionSize info = sstable.getPosition(decKey);
    BufferedRandomAccessFile file = new BufferedRandomAccessFile(sstable.getFilename(), "r");
    file.seek(info.position);
    assert file.readUTF().equals(key); // row key
    file.readInt(); // skip the row data size
    IndexHelper.skipBloomFilter(file);
    ArrayList<IndexHelper.IndexInfo> indexes = IndexHelper.deserializeIndex(file);
    assert indexes.size() > 2;
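    // slices should still read correctly through the column index of the compacted sstable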
    validateSliceLarge(cfStore);
  }

  @Test
  public void testRoundTripStandardCf() throws IOException, ParseException {
    File tempSS = createTemporarySSTable("Keyspace1", "Standard1");
    ColumnFamily cfamily = ColumnFamily.create("Keyspace1", "Standard1");
    IPartitioner<?> partitioner = DatabaseDescriptor.getPartitioner();
    DataOutputBuffer dob = new DataOutputBuffer();
    SSTableWriter writer = new SSTableWriter(tempSS.getPath(), 2, partitioner);
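    // note: SSTableWriter expects rows to be appended in the partitioner's sorted key order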

    // Add rowA
    cfamily.addColumn(
        new QueryPath("Standard1", null, "name".getBytes()), "val".getBytes(), 1, false);
    ColumnFamily.serializer().serializeWithIndexes(cfamily, dob, false);
    writer.append(partitioner.decorateKey("rowA"), dob);
    dob.reset();
    cfamily.clear();

    // Add rowExclude
    cfamily.addColumn(
        new QueryPath("Standard1", null, "name".getBytes()), "val".getBytes(), 1, false);
    ColumnFamily.serializer().serializeWithIndexes(cfamily, dob, false);
    writer.append(partitioner.decorateKey("rowExclude"), dob);
    dob.reset();
    cfamily.clear();

    SSTableReader reader = writer.closeAndOpenReader();

    // Export to JSON, excluding rowExclude
    File tempJson = File.createTempFile("Standard1", ".json");
    SSTableExport.export(reader, new PrintStream(tempJson.getPath()), new String[] {"rowExclude"});

    // Import JSON to another SSTable file
    File tempSS2 = createTemporarySSTable("Keyspace1", "Standard1");
    SSTableImport.importJson(tempJson.getPath(), "Keyspace1", "Standard1", tempSS2.getPath());

    // verify that rowA survived the round trip with its column value intact
    reader = SSTableReader.open(tempSS2.getPath(), DatabaseDescriptor.getPartitioner());
    NamesQueryFilter qf =
        new NamesQueryFilter("rowA", new QueryPath("Standard1", null, null), "name".getBytes());
    ColumnFamily cf = qf.getSSTableColumnIterator(reader).getColumnFamily();
    assertTrue(cf != null);
    assertTrue(Arrays.equals(cf.getColumn("name".getBytes()).value(), hexToBytes("76616c"))); // "val"

    // rowExclude was excluded from the export, so it must be absent from the imported sstable
    qf = new NamesQueryFilter(
        "rowExclude", new QueryPath("Standard1", null, null), "name".getBytes());
    cf = qf.getSSTableColumnIterator(reader).getColumnFamily();
    assert cf == null;
  }