public void udtSerDeserTest(int version) throws Exception
{
    ListType<?> lt = ListType.getInstance(Int32Type.instance, true);
    SetType<?> st = SetType.getInstance(UTF8Type.instance, true);
    MapType<?, ?> mt = MapType.getInstance(UTF8Type.instance, LongType.instance, true);

    UserType udt = new UserType("ks",
                                bb("myType"),
                                Arrays.asList(bb("f1"), bb("f2"), bb("f3"), bb("f4")),
                                Arrays.asList(LongType.instance, lt, st, mt));

    Map<ColumnIdentifier, Term.Raw> value = new HashMap<>();
    value.put(ci("f1"), lit(42));
    value.put(ci("f2"), new Lists.Literal(Arrays.<Term.Raw>asList(lit(3), lit(1))));
    value.put(ci("f3"), new Sets.Literal(Arrays.<Term.Raw>asList(lit("foo"), lit("bar"))));
    value.put(ci("f4"), new Maps.Literal(Arrays.<Pair<Term.Raw, Term.Raw>>asList(
                            Pair.<Term.Raw, Term.Raw>create(lit("foo"), lit(24)),
                            Pair.<Term.Raw, Term.Raw>create(lit("bar"), lit(12)))));

    UserTypes.Literal u = new UserTypes.Literal(value);
    Term t = u.prepare("ks", columnSpec("myValue", udt));

    QueryOptions options = QueryOptions.DEFAULT;
    if (version == 2)
        options = QueryOptions.fromProtocolV2(ConsistencyLevel.ONE, Collections.<ByteBuffer>emptyList());
    else if (version != 3)
        throw new AssertionError("Invalid protocol version for test");

    ByteBuffer serialized = t.bindAndGet(options);

    ByteBuffer[] fields = udt.split(serialized);

    assertEquals(4, fields.length);

    assertEquals(bytes(42L), fields[0]);

    // Note that no matter what protocol version was used in bindAndGet above, the collections
    // inside a UDT should always be serialized with version 3 of the protocol. Which is why we
    // don't use 'version' below, on purpose.
    assertEquals(Arrays.asList(3, 1), lt.getSerializer().deserializeForNativeProtocol(fields[1], 3));

    LinkedHashSet<String> s = new LinkedHashSet<>();
    s.addAll(Arrays.asList("bar", "foo"));
    assertEquals(s, st.getSerializer().deserializeForNativeProtocol(fields[2], 3));

    LinkedHashMap<String, Long> m = new LinkedHashMap<>();
    m.put("bar", 12L);
    m.put("foo", 24L);
    assertEquals(m, mt.getSerializer().deserializeForNativeProtocol(fields[3], 3));
}
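// A minimal sketch (not part of the original test class) of how udtSerDeserTest might be
// driven from JUnit. The @Test method names are assumptions; only protocol versions 2 and 3
// are accepted, per the AssertionError check above.
@Test
public void testUDTSerDeserV2() throws Exception
{
    udtSerDeserTest(2);
}

@Test
public void testUDTSerDeserV3() throws Exception
{
    udtSerDeserTest(3);
}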
private AbstractType<?> getTypeAndRemove(Map<ColumnIdentifier, AbstractType> columns, ColumnIdentifier t) throws InvalidRequestException
{
    AbstractType type = columns.get(t);
    if (type == null)
        throw new InvalidRequestException(String.format("Unknown definition %s referenced in PRIMARY KEY", t));
    if (type instanceof CollectionType)
        throw new InvalidRequestException(String.format("Invalid collection type for PRIMARY KEY component %s", t));

    columns.remove(t);
    Boolean isReversed = definedOrdering.get(t);
    return isReversed != null && isReversed ? ReversedType.getInstance(type) : type;
}
public Collection<RowMutation> getMutations(List<ByteBuffer> variables, boolean local, ConsistencyLevel cl, long now)
throws RequestExecutionException, RequestValidationException
{
    // keys
    List<ByteBuffer> keys = UpdateStatement.buildKeyNames(cfDef, processedKeys, variables);

    // columns
    ColumnNameBuilder builder = cfDef.getColumnNameBuilder();
    CFDefinition.Name firstEmpty = UpdateStatement.buildColumnNames(cfDef, processedKeys, builder, variables, false);

    boolean fullKey = builder.componentCount() == cfDef.columns.size();
    boolean isRange = cfDef.isCompact ? !fullKey : (!fullKey || toRemove.isEmpty());

    if (!toRemove.isEmpty() && isRange)
        throw new InvalidRequestException(String.format("Missing mandatory PRIMARY KEY part %s since %s specified",
                                                        firstEmpty, toRemove.iterator().next().left));

    // Lists' DISCARD operation incurs a read. Do that now.
    Set<ByteBuffer> toRead = null;
    for (Pair<CFDefinition.Name, Term> p : toRemove)
    {
        CFDefinition.Name name = p.left;
        Term value = p.right;

        if ((name.type instanceof ListType) && value != null)
        {
            if (toRead == null)
                toRead = new TreeSet<ByteBuffer>(UTF8Type.instance);
            toRead.add(name.name.key);
        }
    }

    Map<ByteBuffer, ColumnGroupMap> rows = toRead != null
                                         ? readRows(keys, builder, toRead, (CompositeType) cfDef.cfm.comparator, local, cl)
                                         : null;

    Collection<RowMutation> rowMutations = new ArrayList<RowMutation>(keys.size());
    UpdateParameters params = new UpdateParameters(variables, getTimestamp(now), -1);

    for (ByteBuffer key : keys)
        rowMutations.add(mutationForKey(cfDef, key, builder, isRange, params, rows == null ? null : rows.get(key)));

    return rowMutations;
}
// Column definitions
private Map<ByteBuffer, ColumnDefinition> getColumns()
{
    Map<ByteBuffer, ColumnDefinition> columnDefs = new HashMap<ByteBuffer, ColumnDefinition>();
    Integer componentIndex = null;
    if (comparator instanceof CompositeType)
    {
        CompositeType ct = (CompositeType) comparator;
        componentIndex = ct.types.get(ct.types.size() - 1) instanceof ColumnToCollectionType
                       ? ct.types.size() - 2
                       : ct.types.size() - 1;
    }

    for (Map.Entry<ColumnIdentifier, AbstractType> col : columns.entrySet())
    {
        ColumnIdentifier id = col.getKey();
        columnDefs.put(id.key, staticColumns.contains(id)
                               ? ColumnDefinition.staticDef(id.key, col.getValue(), componentIndex)
                               : ColumnDefinition.regularDef(id.key, col.getValue(), componentIndex));
    }

    return columnDefs;
}
public void collectionSerDeserTest(int version) throws Exception
{
    // Lists
    ListType<?> lt = ListType.getInstance(Int32Type.instance, true);
    List<Integer> l = Arrays.asList(2, 6, 1, 9);

    List<ByteBuffer> lb = new ArrayList<>(l.size());
    for (Integer i : l)
        lb.add(Int32Type.instance.decompose(i));

    assertEquals(l, lt.getSerializer().deserializeForNativeProtocol(CollectionSerializer.pack(lb, lb.size(), version), version));

    // Sets
    SetType<?> st = SetType.getInstance(UTF8Type.instance, true);
    Set<String> s = new LinkedHashSet<>();
    s.addAll(Arrays.asList("bar", "foo", "zee"));

    List<ByteBuffer> sb = new ArrayList<>(s.size());
    for (String t : s)
        sb.add(UTF8Type.instance.decompose(t));

    assertEquals(s, st.getSerializer().deserializeForNativeProtocol(CollectionSerializer.pack(sb, sb.size(), version), version));

    // Maps
    MapType<?, ?> mt = MapType.getInstance(UTF8Type.instance, LongType.instance, true);
    Map<String, Long> m = new LinkedHashMap<>();
    m.put("bar", 12L);
    m.put("foo", 42L);
    m.put("zee", 14L);

    List<ByteBuffer> mb = new ArrayList<>(m.size() * 2);
    for (Map.Entry<String, Long> entry : m.entrySet())
    {
        mb.add(UTF8Type.instance.decompose(entry.getKey()));
        mb.add(LongType.instance.decompose(entry.getValue()));
    }

    assertEquals(m, mt.getSerializer().deserializeForNativeProtocol(CollectionSerializer.pack(mb, m.size(), version), version));
}
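// A minimal sketch (not part of the original test class) of how collectionSerDeserTest might
// be driven from JUnit, assuming the same native protocol versions (2 and 3) exercised by
// udtSerDeserTest above. The @Test method names are assumptions.
@Test
public void testCollectionSerDeserV2() throws Exception
{
    collectionSerDeserTest(2);
}

@Test
public void testCollectionSerDeserV3() throws Exception
{
    collectionSerDeserTest(3);
}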
public void setOrdering(ColumnIdentifier alias, boolean reversed)
{
    definedOrdering.put(alias, reversed);
}
public void addDefinition(ColumnIdentifier def, CQL3Type type, boolean isStatic)
{
    definedNames.add(def);
    definitions.put(def, type);
    if (isStatic)
        staticColumns.add(def);
}
/**
 * Transform this raw statement into a CreateTableStatement.
 */
public ParsedStatement.Prepared prepare() throws RequestValidationException
{
    // Column family name
    if (!columnFamily().matches("\\w+"))
        throw new InvalidRequestException(String.format("\"%s\" is not a valid column family name (must be alphanumeric characters only: [0-9A-Za-z]+)", columnFamily()));
    if (columnFamily().length() > Schema.NAME_LENGTH)
        throw new InvalidRequestException(String.format("Column family names shouldn't be more than %s characters long (got \"%s\")", Schema.NAME_LENGTH, columnFamily()));

    for (Multiset.Entry<ColumnIdentifier> entry : definedNames.entrySet())
        if (entry.getCount() > 1)
            throw new InvalidRequestException(String.format("Multiple definition of identifier %s", entry.getElement()));

    properties.validate();

    CreateTableStatement stmt = new CreateTableStatement(cfName, properties, ifNotExists, staticColumns);

    Map<ByteBuffer, CollectionType> definedCollections = null;
    for (Map.Entry<ColumnIdentifier, CQL3Type> entry : definitions.entrySet())
    {
        ColumnIdentifier id = entry.getKey();
        CQL3Type pt = entry.getValue();
        if (pt.isCollection())
        {
            if (definedCollections == null)
                definedCollections = new HashMap<ByteBuffer, CollectionType>();
            definedCollections.put(id.key, (CollectionType) pt.getType());
        }
        stmt.columns.put(id, pt.getType()); // we'll remove what is not a column below
    }

    if (keyAliases.isEmpty())
        throw new InvalidRequestException("No PRIMARY KEY specified (exactly one required)");
    else if (keyAliases.size() > 1)
        throw new InvalidRequestException("Multiple PRIMARY KEYs specified (exactly one required)");

    List<ColumnIdentifier> kAliases = keyAliases.get(0);

    List<AbstractType<?>> keyTypes = new ArrayList<AbstractType<?>>(kAliases.size());
    for (ColumnIdentifier alias : kAliases)
    {
        stmt.keyAliases.add(alias.key);
        AbstractType<?> t = getTypeAndRemove(stmt.columns, alias);
        if (t instanceof CounterColumnType)
            throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", alias));
        if (staticColumns.contains(alias))
            throw new InvalidRequestException(String.format("Static column %s cannot be part of the PRIMARY KEY", alias));
        keyTypes.add(t);
    }
    stmt.keyValidator = keyTypes.size() == 1 ? keyTypes.get(0) : CompositeType.getInstance(keyTypes);

    // Dense means that no part of the comparator stores a CQL column name. This means
    // COMPACT STORAGE with at least one column alias (otherwise it's a thrift "static" CF).
    stmt.isDense = useCompactStorage && !columnAliases.isEmpty();

    // Handle column aliases
    if (columnAliases.isEmpty())
    {
        if (useCompactStorage)
        {
            // There should remain some column definition since it is a non-composite "static" CF
            if (stmt.columns.isEmpty())
                throw new InvalidRequestException("No definition found that is not part of the PRIMARY KEY");

            if (definedCollections != null)
                throw new InvalidRequestException("Collection types are not supported with COMPACT STORAGE");

            stmt.comparator = CFDefinition.definitionType;
        }
        else
        {
            List<AbstractType<?>> types = new ArrayList<AbstractType<?>>(definedCollections == null ? 1 : 2);
            types.add(CFDefinition.definitionType);
            if (definedCollections != null)
                types.add(ColumnToCollectionType.getInstance(definedCollections));
            stmt.comparator = CompositeType.getInstance(types);
        }
    }
    else
    {
        // If we use compact storage and have only one alias, it is a
        // standard "dynamic" CF, otherwise it's a composite
        if (useCompactStorage && columnAliases.size() == 1)
        {
            if (definedCollections != null)
                throw new InvalidRequestException("Collection types are not supported with COMPACT STORAGE");

            ColumnIdentifier alias = columnAliases.get(0);
            stmt.columnAliases.add(alias.key);
            stmt.comparator = getTypeAndRemove(stmt.columns, alias);
            if (stmt.comparator instanceof CounterColumnType)
                throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", alias));
            if (staticColumns.contains(alias))
                throw new InvalidRequestException(String.format("Static column %s cannot be part of the PRIMARY KEY", alias));
        }
        else
        {
            List<AbstractType<?>> types = new ArrayList<AbstractType<?>>(columnAliases.size() + 1);
            for (ColumnIdentifier t : columnAliases)
            {
                stmt.columnAliases.add(t.key);

                AbstractType<?> type = getTypeAndRemove(stmt.columns, t);
                if (type instanceof CounterColumnType)
                    throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", t));
                if (staticColumns.contains(t))
                    throw new InvalidRequestException(String.format("Static column %s cannot be part of the PRIMARY KEY", t));
                types.add(type);
            }

            if (useCompactStorage)
            {
                if (definedCollections != null)
                    throw new InvalidRequestException("Collection types are not supported with COMPACT STORAGE");
            }
            else
            {
                // For sparse, we must add the last UTF8 component
                // and the collection type if there is one
                types.add(CFDefinition.definitionType);
                if (definedCollections != null)
                    types.add(ColumnToCollectionType.getInstance(definedCollections));
            }

            if (types.isEmpty())
                throw new IllegalStateException("Nonsensical empty parameter list for CompositeType");
            stmt.comparator = CompositeType.getInstance(types);
        }
    }

    if (!staticColumns.isEmpty())
    {
        // Only CQL3 tables can have static columns
        if (useCompactStorage)
            throw new InvalidRequestException("Static columns are not supported in COMPACT STORAGE tables");
        // Static columns only make sense if we have at least one clustering column. Otherwise everything is static anyway.
        if (columnAliases.isEmpty())
            throw new InvalidRequestException("Static columns are only useful (and thus allowed) if the table has at least one clustering column");
    }

    if (useCompactStorage && !stmt.columnAliases.isEmpty())
    {
        if (stmt.columns.isEmpty())
        {
            // The only value we'll insert will be the empty one, so the default validator doesn't matter
            stmt.defaultValidator = BytesType.instance;
            // We need to distinguish between
            //   * I'm upgrading from thrift so the valueAlias is null
            //   * I've defined my table with only a PK (and the column value will be empty)
            // So, we use an empty valueAlias (rather than null) for the second case
            stmt.valueAlias = ByteBufferUtil.EMPTY_BYTE_BUFFER;
        }
        else
        {
            if (stmt.columns.size() > 1)
                throw new InvalidRequestException(String.format("COMPACT STORAGE with composite PRIMARY KEY allows no more than one column not part of the PRIMARY KEY (got: %s)",
                                                                StringUtils.join(stmt.columns.keySet(), ", ")));

            Map.Entry<ColumnIdentifier, AbstractType> lastEntry = stmt.columns.entrySet().iterator().next();
            stmt.defaultValidator = lastEntry.getValue();
            stmt.valueAlias = lastEntry.getKey().key;
            stmt.columns.remove(lastEntry.getKey());
        }
    }
    else
    {
        // For compact, we are in the "static" case, so we need at least one column defined. For
        // non-compact however, having just the PK is fine since we have the CQL3 row marker.
        if (useCompactStorage && stmt.columns.isEmpty())
            throw new InvalidRequestException("COMPACT STORAGE with non-composite PRIMARY KEY requires one column not part of the PRIMARY KEY, none given");

        // There is no way to insert/access a column that is not defined for non-compact storage, so
        // the actual validator doesn't matter much (except that we want to recognize counter CFs,
        // as the usual limitations apply to them).
        stmt.defaultValidator = !stmt.columns.isEmpty() && (stmt.columns.values().iterator().next() instanceof CounterColumnType)
                              ? CounterColumnType.instance
                              : BytesType.instance;
    }

    // If we give a clustering order, we must explicitly do so for all aliases and in the order of the PK
    if (!definedOrdering.isEmpty())
    {
        if (definedOrdering.size() > columnAliases.size())
            throw new InvalidRequestException("Only clustering key columns can be defined in CLUSTERING ORDER directive");

        int i = 0;
        for (ColumnIdentifier id : definedOrdering.keySet())
        {
            ColumnIdentifier c = columnAliases.get(i);
            if (!id.equals(c))
            {
                if (definedOrdering.containsKey(c))
                    throw new InvalidRequestException(String.format("The order of columns in the CLUSTERING ORDER directive must be the one of the clustering key (%s must appear before %s)", c, id));
                else
                    throw new InvalidRequestException(String.format("Missing CLUSTERING ORDER for column %s", c));
            }
            ++i;
        }
    }

    return new ParsedStatement.Prepared(stmt);
}