private static void validateColumns(String keyspace, String columnFamilyName, ByteBuffer superColumnName, Iterable<ByteBuffer> column_names)
    throws InvalidRequestException
{
    if (superColumnName != null)
    {
        if (superColumnName.remaining() > IColumn.MAX_NAME_LENGTH)
            throw new InvalidRequestException("supercolumn name length must not be greater than " + IColumn.MAX_NAME_LENGTH);
        if (superColumnName.remaining() == 0)
            throw new InvalidRequestException("supercolumn name must not be empty");
        if (DatabaseDescriptor.getColumnFamilyType(keyspace, columnFamilyName) == ColumnFamilyType.Standard)
            throw new InvalidRequestException("supercolumn specified to ColumnFamily " + columnFamilyName + " containing normal columns");
    }
    AbstractType comparator = ColumnFamily.getComparatorFor(keyspace, columnFamilyName, superColumnName);
    for (ByteBuffer name : column_names)
    {
        if (name.remaining() > IColumn.MAX_NAME_LENGTH)
            throw new InvalidRequestException("column name length must not be greater than " + IColumn.MAX_NAME_LENGTH);
        if (name.remaining() == 0)
            throw new InvalidRequestException("column name must not be empty");
        try
        {
            comparator.validate(name);
        }
        catch (MarshalException e)
        {
            throw new InvalidRequestException(e.getMessage());
        }
    }
}
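/*
 * Illustrative sketch (not part of the original source): a minimal standalone
 * version of the validate()/MarshalException pattern used by validateColumns()
 * above. It assumes UTF8Type.instance and MarshalException from the Cassandra
 * marshalling package are available; the helper name checkName is hypothetical.
 */
private static void checkName(ByteBuffer name) throws InvalidRequestException
{
    try
    {
        // UTF8Type rejects byte sequences that are not well-formed UTF-8,
        // e.g. ByteBuffer.wrap(new byte[]{ (byte) 0xff })
        UTF8Type.instance.validate(name);
    }
    catch (MarshalException e)
    {
        // surface the marshalling problem as a client-facing error,
        // mirroring the catch block in validateColumns()
        throw new InvalidRequestException(e.getMessage());
    }
}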
private Integer seekToSubColumn(CFMetaData metadata, FileDataInput file, ByteBuffer sblockId, List<IndexHelper.IndexInfo> indexList)
    throws IOException
{
    file.readInt(); // column count

    /* get the various column ranges we have to read */
    AbstractType comparator = metadata.comparator;

    int index = IndexHelper.indexFor(sblockId, indexList, comparator, false);
    if (index == indexList.size())
        return null;

    IndexHelper.IndexInfo indexInfo = indexList.get(index);
    if (comparator.compare(sblockId, indexInfo.firstName) < 0)
        return null;

    FileMark mark = file.mark();
    FileUtils.skipBytesFully(file, indexInfo.offset);

    while (file.bytesPastMark(mark) < indexInfo.offset + indexInfo.width)
    {
        Integer dataLength = isSubBlockFound(metadata, file, sblockId);

        if (dataLength == null)
            return null;

        if (dataLength < 0)
            continue;

        return dataLength;
    }

    return null;
}
private CFRowAdder add(CellName name, ColumnDefinition def, Object value)
{
    if (value == null)
    {
        cf.addColumn(new BufferDeletedCell(name, ldt, timestamp));
    }
    else
    {
        // for collection columns, individual cells hold values of the collection's value type
        AbstractType valueType = def.type.isCollection()
                               ? ((CollectionType) def.type).valueComparator()
                               : def.type;
        cf.addColumn(new BufferCell(name, value instanceof ByteBuffer ? (ByteBuffer) value : valueType.decompose(value), timestamp));
    }
    return this;
}
private String rangesAsString()
{
    assert !ranges.isEmpty();
    StringBuilder sb = new StringBuilder();
    AbstractType at = (AbstractType) ranges.comparator();
    assert at != null;
    for (RangeTombstone i : ranges)
    {
        sb.append("[");
        sb.append(at.getString(i.min)).append("-");
        sb.append(at.getString(i.max)).append(", ");
        sb.append(i.data);
        sb.append("]");
    }
    return sb.toString();
}
private String rangesAsString()
{
    assert !ranges.isEmpty();
    StringBuilder sb = new StringBuilder();
    AbstractType at = (AbstractType) ranges.comparator();
    assert at != null;
    Iterator<RangeTombstone> iter = rangeIterator();
    while (iter.hasNext())
    {
        RangeTombstone i = iter.next();
        sb.append("[");
        sb.append(at.getString(i.min)).append("-");
        sb.append(at.getString(i.max)).append(", ");
        sb.append(i.data);
        sb.append("]");
    }
    return sb.toString();
}
/**
 * Returns a {@code String} representation of {@code byteBuffer} validated by {@code type}.
 *
 * @param byteBuffer the {@link ByteBuffer} to be converted to {@code String}.
 * @param type {@link AbstractType} of {@code byteBuffer}.
 * @return a {@code String} representation of {@code byteBuffer} validated by {@code type}.
 */
public static String toString(ByteBuffer byteBuffer, AbstractType<?> type)
{
    if (type instanceof CompositeType)
    {
        CompositeType composite = (CompositeType) type;
        List<AbstractType<?>> types = composite.types;
        ByteBuffer[] components = composite.split(byteBuffer);
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < components.length; i++)
        {
            AbstractType<?> componentType = types.get(i);
            ByteBuffer component = components[i];
            sb.append(componentType.compose(component));
            if (i < types.size() - 1)
                sb.append(':');
        }
        return sb.toString();
    }
    else
    {
        return type.compose(byteBuffer).toString();
    }
}
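/*
 * Illustrative sketch (not part of the original source): one possible use of
 * toString() above. UTF8Type/Int32Type singletons and the
 * CompositeType.getInstance()/CompositeType.build() factories are assumptions
 * about the available Cassandra version, not guarantees.
 */
public static void toStringExample()
{
    // simple type: round-trips "hello" back to its string form
    String s = toString(UTF8Type.instance.decompose("hello"), UTF8Type.instance);   // "hello"

    // composite type: components are composed individually and joined with ':'
    CompositeType composite = CompositeType.getInstance(UTF8Type.instance, Int32Type.instance);
    ByteBuffer packed = CompositeType.build(UTF8Type.instance.decompose("key"),
                                            Int32Type.instance.decompose(42));
    String c = toString(packed, composite);                                          // "key:42"
}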
public static void validateColumn(String keyspace, ColumnParent column_parent, Column column)
    throws InvalidRequestException
{
    validateTtl(column);
    validateColumns(keyspace, column_parent, Arrays.asList(column.name));
    try
    {
        AbstractType validator = DatabaseDescriptor.getValueValidator(keyspace, column_parent.column_family, column.name);
        if (validator != null)
            validator.validate(column.value);
    }
    catch (MarshalException me)
    {
        throw new InvalidRequestException(String.format("[%s][%s][%s] = [%s] failed validation (%s)",
                                                        keyspace,
                                                        column_parent.getColumn_family(),
                                                        FBUtilities.bytesToHex(column.name),
                                                        FBUtilities.bytesToHex(column.value),
                                                        me.getMessage()));
    }
}
public int compare(ByteBuffer o1, ByteBuffer o2)
{
    // An empty byte buffer is always smaller
    if (o1.remaining() == 0)
    {
        return o2.remaining() == 0 ? 0 : -1;
    }
    if (o2.remaining() == 0)
    {
        return 1;
    }

    // delegate to the base type with the arguments swapped, which reverses the ordering
    return baseType.compare(o2, o1);
}
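/*
 * Illustrative sketch (not part of the original source): the compare() above
 * reads like ReversedType-style delegation, so wrapping an Int32 base type
 * inverts the natural ordering. ReversedType.getInstance() and
 * Int32Type.instance are assumed from the Cassandra marshal package; note the
 * empty-buffer special case above still sorts empty values first.
 */
public static void reversedCompareExample()
{
    AbstractType<Integer> reversed = ReversedType.getInstance(Int32Type.instance);
    ByteBuffer one = Int32Type.instance.decompose(1);
    ByteBuffer two = Int32Type.instance.decompose(2);

    assert Int32Type.instance.compare(one, two) < 0; // natural order: 1 before 2
    assert reversed.compare(one, two) > 0;           // reversed order: 2 before 1
}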
/**
 * Given the collection of columns in the Column Family, the name index is generated and written
 * into the provided stream.
 *
 * @param columns columns for which the name index needs to be generated
 * @param dos stream into which the serialized name index needs to be written.
 * @throws IOException
 */
private static void doIndexing(AbstractType comparator, Collection<IColumn> columns, DataOutput dos) throws IOException
{
    if (columns.isEmpty())
    {
        dos.writeInt(0);
        return;
    }

    /*
     * Maintains a list of ColumnIndexInfo objects for the columns in this
     * column family. The key is the column name and the position is the
     * relative offset of that column name from the start of the list.
     * We do this so that we don't read all the columns into memory.
     */
    List<IndexHelper.IndexInfo> indexList = new ArrayList<IndexHelper.IndexInfo>();

    int endPosition = 0, startPosition = -1;
    int indexSizeInBytes = 0;
    IColumn column = null, firstColumn = null;
    /* column offsets at the right thresholds into the index map. */
    for (Iterator<IColumn> it = columns.iterator(); it.hasNext();)
    {
        column = it.next();
        if (firstColumn == null)
        {
            firstColumn = column;
            startPosition = endPosition;
        }
        endPosition += column.serializedSize();
        /* if we hit the column index size that we have to index after, go ahead and index it. */
        if (endPosition - startPosition >= DatabaseDescriptor.getColumnIndexSize())
        {
            IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), startPosition, endPosition - startPosition);
            indexList.add(cIndexInfo);
            indexSizeInBytes += cIndexInfo.serializedSize();
            firstColumn = null;
        }
    }
    // the last column may have fallen on an index boundary already. if not, index it explicitly.
    if (indexList.isEmpty() || comparator.compare(indexList.get(indexList.size() - 1).lastName, column.name()) != 0)
    {
        IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), startPosition, endPosition - startPosition);
        indexList.add(cIndexInfo);
        indexSizeInBytes += cIndexInfo.serializedSize();
    }
    assert indexSizeInBytes > 0;
    dos.writeInt(indexSizeInBytes);
    for (IndexHelper.IndexInfo cIndexInfo : indexList)
    {
        cIndexInfo.serialize(dos);
    }
}
public static void validateRange(String keyspace, ColumnParent column_parent, SliceRange range)
    throws InvalidRequestException
{
    AbstractType comparator = ColumnFamily.getComparatorFor(keyspace, column_parent.column_family, column_parent.super_column);
    try
    {
        comparator.validate(range.start);
        comparator.validate(range.finish);
    }
    catch (MarshalException e)
    {
        throw new InvalidRequestException(e.getMessage());
    }

    if (range.count < 0)
        throw new InvalidRequestException("get_slice requires non-negative count");

    Comparator<ByteBuffer> orderedComparator = range.isReversed() ? comparator.getReverseComparator() : comparator;
    if (range.start.remaining() > 0
        && range.finish.remaining() > 0
        && orderedComparator.compare(range.start, range.finish) > 0)
    {
        throw new InvalidRequestException("range finish must come after start in the order of traversal");
    }
}
private boolean isColumnNeeded(IColumn column)
{
    // an empty startColumn or finishColumn means the slice is unbounded on that side
    if (startColumn.remaining() == 0 && finishColumn.remaining() == 0)
        return true;
    else if (startColumn.remaining() == 0 && !reversed)
        return comparator.compare(column.name(), finishColumn) <= 0;
    else if (startColumn.remaining() == 0 && reversed)
        return comparator.compare(column.name(), finishColumn) >= 0;
    else if (finishColumn.remaining() == 0 && !reversed)
        return comparator.compare(column.name(), startColumn) >= 0;
    else if (finishColumn.remaining() == 0 && reversed)
        return comparator.compare(column.name(), startColumn) <= 0;
    else if (!reversed)
        return comparator.compare(column.name(), startColumn) >= 0 && comparator.compare(column.name(), finishColumn) <= 0;
    else // if reversed
        return comparator.compare(column.name(), startColumn) <= 0 && comparator.compare(column.name(), finishColumn) >= 0;
}
@Override
public Term fromJSONObject(Object parsed) throws MarshalException
{
    if (parsed instanceof String)
        parsed = Json.decodeJson((String) parsed);

    if (!(parsed instanceof List))
        throw new MarshalException(String.format(
                "Expected a list (representing a set), but got a %s: %s",
                parsed.getClass().getSimpleName(), parsed));

    List list = (List) parsed;
    Set<Term> terms = new HashSet<>(list.size());
    for (Object element : list)
    {
        if (element == null)
            throw new MarshalException("Invalid null element in set");
        terms.add(elements.fromJSONObject(element));
    }
    return new Sets.DelayedValue(elements, terms);
}
static int compareListOrSet(AbstractType<?> elementsComparator, ByteBuffer o1, ByteBuffer o2)
{
    // Note that this is only used if the collection is inside a UDT
    if (!o1.hasRemaining() || !o2.hasRemaining())
        return o1.hasRemaining() ? 1 : o2.hasRemaining() ? -1 : 0;

    ByteBuffer bb1 = o1.duplicate();
    ByteBuffer bb2 = o2.duplicate();

    int size1 = CollectionSerializer.readCollectionSize(bb1, 3);
    int size2 = CollectionSerializer.readCollectionSize(bb2, 3);

    for (int i = 0; i < Math.min(size1, size2); i++)
    {
        ByteBuffer v1 = CollectionSerializer.readValue(bb1, 3);
        ByteBuffer v2 = CollectionSerializer.readValue(bb2, 3);
        int cmp = elementsComparator.compare(v1, v2);
        if (cmp != 0)
            return cmp;
    }

    return size1 == size2 ? 0 : (size1 < size2 ? -1 : 1);
}
@Override
public int compareTo(Token<byte[]> o)
{
    return comparator.compare(token, o.token);
}

@Override
public String toString()
{
    return comparator.getString(token);
}
/*
 * We need to compare the CQL3 representation of the type rather than the
 * AbstractType itself, because the latter comparison fails, for example, if a
 * UDT has been changed: UserType.equals() takes the field names and types into
 * account. Example CQL sequence that would fail when comparing AbstractType:
 *    CREATE TYPE foo ...
 *    CREATE FUNCTION bar ( par foo ) RETURNS foo ...
 *    ALTER TYPE foo ADD ...
 * or
 *    ALTER TYPE foo ALTER ...
 * or
 *    ALTER TYPE foo RENAME ...
 */
public static boolean typeEquals(AbstractType<?> t1, AbstractType<?> t2)
{
    return t1.asCQL3Type().toString().equals(t2.asCQL3Type().toString());
}
public static int typeHashCode(AbstractType<?> t)
{
    return t.asCQL3Type().toString().hashCode();
}
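/*
 * Illustrative sketch (not part of the original source): typeEquals() and
 * typeHashCode() both key off asCQL3Type().toString(), so types that compare
 * equal must also hash equally. Int32Type.instance (CQL3 form "int") is just
 * an assumed, convenient example type.
 */
public static void typeEqualityExample()
{
    AbstractType<?> t1 = Int32Type.instance;
    AbstractType<?> t2 = Int32Type.instance;

    assert typeEquals(t1, t2);                       // both render as "int"
    assert typeHashCode(t1) == typeHashCode(t2);     // hash stays consistent with equality
}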
private ListType(AbstractType<T> elements)
{
    super(Kind.LIST);
    this.elements = elements;
    this.serializer = ListSerializer.getInstance(elements.getSerializer());
}
public boolean references(AbstractType<?> check)
{
    return super.references(check) || baseType.references(check);
}

public TypeSerializer<T> getSerializer()
{
    return baseType.getSerializer();
}

@Override
public CQL3Type asCQL3Type()
{
    return baseType.asCQL3Type();
}

public ByteBuffer fromString(String source)
{
    return baseType.fromString(source);
}

public String getString(ByteBuffer bytes)
{
    return baseType.getString(bytes);
}
public static DecoratedKey dk(String key, AbstractType type)
{
    return StorageService.getPartitioner().decorateKey(type.fromString(key));
}
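/*
 * Illustrative sketch (not part of the original source): dk() above is a
 * test-style helper, so this only works where StorageService already has a
 * partitioner configured (e.g. inside the Cassandra unit-test harness).
 * Int32Type/UTF8Type singletons are assumed.
 */
public static void dkExample()
{
    DecoratedKey intKey  = dk("42", Int32Type.instance);    // key string parsed as an int
    DecoratedKey textKey = dk("alice", UTF8Type.instance);  // key string parsed as text
    // both keys are decorated (paired with a token) by the configured partitioner
}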
public SetType(AbstractType<T> elements, boolean isMultiCell)
{
    super(ComparisonType.CUSTOM, Kind.SET);
    this.elements = elements;
    this.serializer = SetSerializer.getInstance(elements.getSerializer(), elements);
    this.isMultiCell = isMultiCell;
}
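/*
 * Illustrative sketch (not part of the original source): obtaining a SetType
 * through its getInstance() factory (assumed to exist alongside the
 * constructor above) and round-tripping a value through the serializer the
 * constructor wires up. Assumes java.util collections and UTF8Type.instance.
 */
public static void setTypeExample()
{
    SetType<String> type = SetType.getInstance(UTF8Type.instance, true);

    Set<String> original = new HashSet<>(Arrays.asList("a", "b", "c"));
    ByteBuffer serialized = type.decompose(original);    // serialized via SetSerializer
    Set<String> roundTripped = type.compose(serialized); // deserialized back to a Set

    assert roundTripped.equals(original);
}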