@Override
public Map<TableName, SequenceValue> initSequences(String tenantId, List<TableName> sequences, long timestamp)
        throws SQLException {
    Map<TableName, SequenceValue> sequenceMap = Maps.newHashMapWithExpectedSize(sequences.size());
    for (TableName name : sequences) {
        // Look up each requested sequence in the cached metadata, failing fast if it is missing.
        PSequence sequence = metaData.getSequence(name);
        if (sequence == null) {
            throw new SequenceNotFoundException(name.getSchemaName(), name.getTableName());
        }
        SequenceValue existingValue = sequenceMap.get(name);
        SequenceValue value = new SequenceValue(sequence);
        if (existingValue != null) {
            // Carry forward the values already recorded when the same sequence
            // appears more than once in the requested list.
            value.currentValue = existingValue.currentValue;
            value.nextValue = existingValue.nextValue;
        }
        sequenceMap.put(name, value);
    }
    return sequenceMap;
}
@Override
public Long createSequence(String tenantId, String schemaName, String sequenceName, long startWith, long incrementBy, long timestamp)
        throws SQLException {
    TableName tableName = TableName.create(schemaName, sequenceName);
    if (metaData.getSequence(tableName) != null) {
        // The sequence already exists; signal this to the caller by returning null.
        return null;
    }
    addSequence(tableName, new PSequenceImpl(incrementBy, startWith, timestamp));
    // Seed the in-memory value so the first NEXT VALUE FOR returns the start value.
    SequenceValue value = new SequenceValue(incrementBy, startWith, timestamp);
    value.currentValue = startWith;
    value.nextValue = startWith;
    sequenceMap.put(tableName, value);
    return HConstants.LATEST_TIMESTAMP;
}
@Override
public List<TableName> reserveSequences(String tenantId, Set<Map.Entry<TableName, SequenceValue>> sequences, int batchSize, long timestamp)
        throws SQLException {
    List<TableName> droppedSequences = Lists.newArrayListWithExpectedSize(sequences.size());
    for (Map.Entry<TableName, SequenceValue> entry : sequences) {
        SequenceValue value = entry.getValue();
        if (value.currentValue == value.nextValue) {
            // All previously reserved values have been handed out; reserve another batch,
            // unless the sequence has been dropped in the meantime.
            if (metaData.getSequence(entry.getKey()) == null) {
                droppedSequences.add(entry.getKey());
                continue;
            }
            value.nextValue += value.incrementBy * batchSize;
        }
    }
    return droppedSequences;
}
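// Hypothetical usage sketch (not part of the original source): assumes a `services` instance
// implementing the methods above, a global tenant (null tenantId), and illustrative
// schema/sequence names. Shown only to illustrate the expected call order of
// createSequence, initSequences, and reserveSequences.
//
// String tenantId = null;
// long ts = HConstants.LATEST_TIMESTAMP;
// services.createSequence(tenantId, "MY_SCHEMA", "MY_SEQUENCE", 1L, 1L, ts);
// Map<TableName, SequenceValue> initialized = services.initSequences(
//         tenantId, Collections.singletonList(TableName.create("MY_SCHEMA", "MY_SEQUENCE")), ts);
// List<TableName> dropped = services.reserveSequences(tenantId, initialized.entrySet(), 100, ts);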