@Test public void testTemporalCompactionSpanningDays() throws Exception { long day1 = Duration.ofDays(Duration.ofNanos(System.nanoTime()).toDays()).toMillis(); long day2 = Duration.ofDays(Duration.ofMillis(day1).toDays() + 1).toMillis(); long day3 = Duration.ofDays(Duration.ofMillis(day1).toDays() + 2).toMillis(); long day4 = Duration.ofDays(Duration.ofMillis(day1).toDays() + 3).toMillis(); List<ShardIndexInfo> inputShards = ImmutableList.of( shardWithTemporalRange(TIMESTAMP, day1, day3), // day2 shardWithTemporalRange(TIMESTAMP, day2, day2), // day2 shardWithTemporalRange(TIMESTAMP, day1, day1), // day1 shardWithTemporalRange(TIMESTAMP, day1 + 100, day2 + 100), // day1 shardWithTemporalRange(TIMESTAMP, day1 - 100, day2 - 100), // day1 shardWithTemporalRange(TIMESTAMP, day2 - 100, day3 - 100), // day2 shardWithTemporalRange(TIMESTAMP, day1, day4) // day2 ); long tableId = temporalTableInfo.getTableId(); Set<OrganizationSet> compactionSets = compactionSetCreator.createCompactionSets(temporalTableInfo, inputShards); assertEquals(compactionSets.size(), 2); Set<OrganizationSet> expected = ImmutableSet.of( new OrganizationSet( tableId, extractIndexes(inputShards, 0, 1, 5, 6), OptionalInt.empty()), new OrganizationSet( tableId, extractIndexes(inputShards, 2, 3, 4), OptionalInt.empty())); assertEquals(compactionSets, expected); }
@Test
public void testTemporalCompactionNoCompactionAcrossDays()
        throws Exception
{
    long day1 = Duration.ofDays(Duration.ofNanos(System.nanoTime()).toDays()).toMillis();
    long dayNumber = Duration.ofMillis(day1).toDays();
    long day2 = Duration.ofDays(dayNumber + 1).toMillis();
    long day3 = Duration.ofDays(dayNumber + 2).toMillis();

    // Every shard is confined to a single day, so no set may mix days.
    List<ShardIndexInfo> shards = ImmutableList.of(
            shardWithTemporalRange(TIMESTAMP, day1, day1),
            shardWithTemporalRange(TIMESTAMP, day2, day2),
            shardWithTemporalRange(TIMESTAMP, day2, day2),
            shardWithTemporalRange(TIMESTAMP, day1, day1),
            shardWithTemporalRange(TIMESTAMP, day3, day3));

    Set<OrganizationSet> actual =
            compactionSetCreator.createCompactionSets(temporalTableInfo, shards);
    assertEquals(actual.size(), 2);

    long tableId = temporalTableInfo.getTableId();
    Set<OrganizationSet> expected = ImmutableSet.of(
            new OrganizationSet(tableId, extractIndexes(shards, 0, 3), OptionalInt.empty()),
            new OrganizationSet(tableId, extractIndexes(shards, 1, 2), OptionalInt.empty()));
    assertEquals(actual, expected);
}
@Test
public void testTemporalCompactionDate()
        throws Exception
{
    // DATE temporal values are whole day numbers, not millis.
    long day1 = Duration.ofNanos(System.nanoTime()).toDays();
    long day2 = day1 + 1;
    long day3 = day1 + 2;

    List<ShardIndexInfo> shards = ImmutableList.of(
            shardWithTemporalRange(DATE, day1, day1),
            shardWithTemporalRange(DATE, day2, day2),
            shardWithTemporalRange(DATE, day3, day3),
            shardWithTemporalRange(DATE, day1, day3),
            shardWithTemporalRange(DATE, day2, day3),
            shardWithTemporalRange(DATE, day1, day2));

    Set<OrganizationSet> actual =
            compactionSetCreator.createCompactionSets(temporalTableInfo, shards);
    assertEquals(actual.size(), 2);

    long tableId = temporalTableInfo.getTableId();
    Set<OrganizationSet> expected = ImmutableSet.of(
            new OrganizationSet(tableId, extractIndexes(shards, 0, 3, 5), OptionalInt.empty()),
            new OrganizationSet(tableId, extractIndexes(shards, 1, 4), OptionalInt.empty()));
    assertEquals(actual, expected);
}
public synchronized PageBufferClientStatus getStatus()
{
    // State checks in priority order: closed wins, then an in-flight request,
    // then scheduled/completed, otherwise the client is still queued.
    String state = closed ? "closed"
            : (future != null) ? "running"
            : scheduled ? "scheduled"
            : completed ? "completed"
            : "queued";

    // Only an in-flight request has an HTTP state to report.
    String httpRequestState = (future == null) ? "not scheduled" : future.getState();

    long rejectedRows = rowsRejected.get();
    int rejectedPages = pagesRejected.get();

    // Zero rejected counts are reported as absent rather than 0.
    return new PageBufferClientStatus(
            location,
            state,
            lastUpdate,
            rowsReceived.get(),
            pagesReceived.get(),
            rejectedRows == 0 ? OptionalLong.empty() : OptionalLong.of(rejectedRows),
            rejectedPages == 0 ? OptionalInt.empty() : OptionalInt.of(rejectedPages),
            requestsScheduled.get(),
            requestsCompleted.get(),
            requestsFailed.get(),
            httpRequestState);
}
public OptionalInt amount(String compound)
{
    // Unknown compounds map to an empty OptionalInt rather than null.
    Integer count = compounds.get(compound);
    return (count == null) ? OptionalInt.empty() : OptionalInt.of(count);
}
private static ConnectorPageSource getPageSource(
        OrcStorageManager manager,
        List<Long> columnIds,
        List<Type> columnTypes,
        UUID uuid,
        TupleDomain<RaptorColumnHandle> tupleDomain)
{
    // No bucket number is supplied: the shard under test is unbucketed.
    OptionalInt bucketNumber = OptionalInt.empty();
    return manager.getPageSource(
            uuid, bucketNumber, columnIds, columnTypes, tupleDomain, READER_ATTRIBUTES);
}
/**
 * Parses CSV rows from the input stream into instances of the formatter's
 * target class, recording row/column errors in {@code context} instead of
 * aborting the whole parse.
 *
 * <p>For each line: a low-level CSV error is marked with no column; a mapping
 * error marks the offending column, the field is added to the ignore set, and
 * the line is retried; any other I/O error marks the line and yields an empty
 * fallback instance so the stream keeps flowing.
 *
 * @param is      raw CSV input
 * @param context sink for error locations discovered while parsing
 * @return a stream of parsed (possibly partially-populated) objects
 */
public Stream<T> parse(InputStream is, CsvErrorSniffer context) {
    ObjectMapper mapper = objectMapper.copy();
    formatter.initMixIn(mapper);
    ObjectReader reader = mapper.readerFor(formatter.getTargetClass());
    CsvSchema schema = new CsvSchema(formatter);
    return parseToCsvLine(is)
        .map(
            line -> {
                // A syntax-level error on this line has no specific column.
                line.getException()
                    .ifPresent(
                        e -> context.mark(new Location(line.getLineNumber(), OptionalInt.empty())));
                Set<String> ignoreField = new HashSet<>();
                while (true) {
                    try {
                        return reader.readValue(schema.toJson(line, ignoreField));
                    } catch (JsonMappingException e) {
                        String path = buildPath(e.getPath());
                        Location location =
                            new Location(
                                line.getLineNumber(), OptionalInt.of(schema.getColumnNumber(path)));
                        // Hitting the same column twice means ignoring it did not
                        // help; the row is unrecoverable.
                        if (context.contains(location)) {
                            throw new IllegalStateException("invalid row state: " + e.getLocation());
                        }
                        context.mark(location);
                        ignoreField.add(path);
                    } catch (IOException e) {
                        context.mark(new Location(line.getLineNumber(), OptionalInt.empty()));
                        try {
                            // Fall back to an empty instance for this row.
                            // (getDeclaredConstructor().newInstance() replaces the
                            // deprecated Class.newInstance().)
                            return formatter.getTargetClass().getDeclaredConstructor().newInstance();
                        } catch (ReflectiveOperationException e2) {
                            throw new ReflectiveOperationRuntimeException(e2);
                        }
                    }
                }
            });
}
// Round-trips a fully-populated HiveSplit through the JSON codec and compares
// the fields one by one.
// NOTE(review): the bucket number (OptionalInt.empty()) and the TupleDomain
// passed to the constructor are never asserted after the round trip — confirm
// whether they should be covered too.
@Test
public void testJsonRoundTrip() {
    Properties schema = new Properties();
    schema.setProperty("foo", "bar");
    schema.setProperty("bar", "baz");
    ImmutableList<HivePartitionKey> partitionKeys =
        ImmutableList.of(
            new HivePartitionKey("a", HIVE_STRING, "apple"),
            new HivePartitionKey("b", HiveType.HIVE_LONG, "42"));
    ImmutableList<HostAddress> addresses =
        ImmutableList.of(
            HostAddress.fromParts("127.0.0.1", 44), HostAddress.fromParts("127.0.0.1", 45));
    HiveSplit expected =
        new HiveSplit(
            "clientId",
            "db",
            "table",
            "partitionId",
            "path",
            42,
            88,
            schema,
            partitionKeys,
            addresses,
            OptionalInt.empty(),
            true,
            TupleDomain.<HiveColumnHandle>all(),
            ImmutableMap.of(1, HIVE_STRING));
    String json = codec.toJson(expected);
    HiveSplit actual = codec.fromJson(json);
    assertEquals(actual.getClientId(), expected.getClientId());
    assertEquals(actual.getDatabase(), expected.getDatabase());
    assertEquals(actual.getTable(), expected.getTable());
    assertEquals(actual.getPartitionName(), expected.getPartitionName());
    assertEquals(actual.getPath(), expected.getPath());
    assertEquals(actual.getStart(), expected.getStart());
    assertEquals(actual.getLength(), expected.getLength());
    assertEquals(actual.getSchema(), expected.getSchema());
    assertEquals(actual.getPartitionKeys(), expected.getPartitionKeys());
    assertEquals(actual.getAddresses(), expected.getAddresses());
    assertEquals(actual.getColumnCoercions(), expected.getColumnCoercions());
    assertEquals(actual.isForceLocalScheduling(), expected.isForceLocalScheduling());
}
@Override
public OptionalInt findAny()
{
    // Partial results are merged by keeping whichever value is non-null;
    // the final predicate accepts any non-null result.
    Integer any = performOperation(
            TerminalFunctions.findAnyIntFunction(),
            false,
            (left, right) -> (left != null) ? left : right,
            value -> value != null);
    return (any == null) ? OptionalInt.empty() : OptionalInt.of(any);
}
// Exercises MoreCollectors.andingInt: the bitwise AND of the mapped values.
@Test
public void testAndInt() {
    // 0b1100 & 0b0110 & 0b101110 & 0b11110011 == 0
    List<Integer> ints = Arrays.asList(0b1100, 0b0110, 0b101110, 0b11110011);
    Collector<Integer, ?, OptionalInt> collector = MoreCollectors.andingInt(Integer::intValue);
    checkShortCircuitCollector("andInt", OptionalInt.of(0), 4, ints::stream, collector);
    checkCollectorEmpty("andIntEmpty", OptionalInt.empty(), collector);
    // Ascending stream from 16384: the running AND eventually reaches 0, which
    // lets the (infinite, parallel) stream terminate.
    assertEquals(
        OptionalInt.of(0),
        IntStreamEx.iterate(16384, i -> i + 1).parallel().boxed().collect(collector));
    // 16383 values starting at 16384 (0x4000) all keep the 0x4000 bit set.
    assertEquals(
        OptionalInt.of(16384),
        IntStreamEx.iterate(16384, i -> i + 1).parallel().limit(16383).boxed().collect(collector));
    // Unwrapping via getAsInt must preserve the UNORDERED characteristic and
    // the short-circuiting behavior.
    Collector<Integer, ?, Integer> unwrapped = MoreCollectors.collectingAndThen(
        MoreCollectors.andingInt(Integer::intValue), OptionalInt::getAsInt);
    assertTrue(unwrapped.characteristics().contains(Characteristics.UNORDERED));
    checkShortCircuitCollector("andIntUnwrapped", 0, 4, ints::stream, unwrapped);
    // 0x1 & 0x10 == 0 already after two elements.
    checkShortCircuitCollector(
        "andIntUnwrapped", 0, 2, Arrays.asList(0x1, 0x10, 0x100)::stream, unwrapped);
}
// Rewrites a two-row shard with one row deleted and verifies the new shard's
// row count, its backup copy, and the shard-recorder bookkeeping.
@Test
public void testRewriter()
        throws Exception
{
    OrcStorageManager manager = createOrcStorageManager();
    long transactionId = TRANSACTION_ID;
    List<Long> columnIds = ImmutableList.of(3L, 7L);
    List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, createVarcharType(10));

    // create file with 2 rows
    StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes);
    List<Page> pages = rowPagesBuilder(columnTypes).row(123L, "hello").row(456L, "bye").build();
    sink.appendPages(pages);
    List<ShardInfo> shards = getFutureValue(sink.commit());
    assertEquals(shardRecorder.getShards().size(), 1);

    // delete one row (bit 0 marks the first row for deletion)
    BitSet rowsToDelete = new BitSet();
    rowsToDelete.set(0);
    Collection<Slice> fragments = manager.rewriteShard(
            transactionId, OptionalInt.empty(), shards.get(0).getShardUuid(), rowsToDelete);

    // the rewrite is reported as a single delta containing one new shard
    Slice shardDelta = Iterables.getOnlyElement(fragments);
    ShardDelta shardDeltas = jsonCodec(ShardDelta.class).fromJson(shardDelta.getBytes());
    ShardInfo shardInfo = Iterables.getOnlyElement(shardDeltas.getNewShards());

    // check that output file has one row
    assertEquals(shardInfo.getRowCount(), 1);

    // check that storage file is same as backup file
    File storageFile = storageService.getStorageFile(shardInfo.getShardUuid());
    File backupFile = fileBackupStore.getBackupFile(shardInfo.getShardUuid());
    assertFileEquals(storageFile, backupFile);

    // verify recorded shard: the rewrite adds a second record for the new shard
    List<RecordedShard> recordedShards = shardRecorder.getShards();
    assertEquals(recordedShards.size(), 2);
    assertEquals(recordedShards.get(1).getTransactionId(), TRANSACTION_ID);
    assertEquals(recordedShards.get(1).getShardUuid(), shardInfo.getShardUuid());
}
/**
 * Compute split-per-shard (separate split for each shard).
 *
 * <p>Advances the result set by one row; returns end-of-data when exhausted.
 * For bucketed tables the owning node comes from the bucket-to-node map;
 * otherwise the shard's own node id list is resolved.
 */
private BucketShards compute()
        throws SQLException
{
    if (!resultSet.next()) {
        return endOfData();
    }

    UUID shardUuid = uuidFromBytes(resultSet.getBytes("shard_uuid"));
    Set<String> nodeIdentifiers;
    OptionalInt bucketNumber = OptionalInt.empty();

    if (bucketToNode != null) {
        // Bucketed table: exactly one node, determined by the bucket number.
        int bucket = resultSet.getInt("bucket_number");
        bucketNumber = OptionalInt.of(bucket);
        nodeIdentifiers = ImmutableSet.of(getBucketNode(bucket));
    }
    else {
        // Unbucketed: nodes are listed per shard in the node_ids column.
        List<Integer> nodeIds = intArrayFromBytes(resultSet.getBytes("node_ids"));
        nodeIdentifiers = getNodeIdentifiers(nodeIds, shardUuid);
    }

    ShardNodes shard = new ShardNodes(shardUuid, nodeIdentifiers);
    return new BucketShards(bucketNumber, ImmutableSet.of(shard));
}
@Override
public OptionalInt reduce(IntBinaryOperator op)
{
    // Merge partial results, treating null as "no value yet": the operator is
    // only applied when both sides are present.
    Integer combined = performOperation(
            TerminalFunctions.reduceFunction(op),
            true,
            (left, right) -> {
                if (left == null) {
                    return right;
                }
                return (right == null) ? left : op.applyAsInt(left, right);
            },
            null);
    return (combined == null) ? OptionalInt.empty() : OptionalInt.of(combined);
}
@Override
public OptionalInt max()
{
    // Merge partial maxima, treating null as "no value yet".
    Integer best = performOperation(
            TerminalFunctions.maxIntFunction(),
            false,
            (left, right) -> {
                if (left == null) {
                    return right;
                }
                return (right == null) ? left : Math.max(left, right);
            },
            null);
    return (best == null) ? OptionalInt.empty() : OptionalInt.of(best);
}
/**
 * Default JSON-P implementation of a skill traited fact.
 *
 * <p>Fields are populated via the JSON-P binding annotations; the field
 * initializers provide the defaults used when a value is absent.
 *
 * @author Fabrice Bouyé
 */
public final class JsonpSkillTraitedFact implements SkillTraitedFact {
    // Localized fact text; defaults to the empty localized resource.
    @LocalizedResource private String text = LocalizedResource.DEFAULT;
    // Icon URL; empty reference when none is supplied.
    @URLValue private URLReference icon = URLReference.empty();
    // Fact type; UNKNOWN until set from JSON.
    @EnumValue private SkillFactType type = SkillFactType.UNKNOWN;
    // Id of the trait associated with this fact.
    @IdValue private int requiredTrait = IdValue.DEFAULT_INTEGER_ID;
    // NOTE(review): presumably the index of the base fact this one overrides —
    // confirm against the SkillTraitedFact contract.
    @OptionalValue @QuantityValue private OptionalInt overrides = OptionalInt.empty();

    /** Creates a new empty instance. */
    public JsonpSkillTraitedFact() {}

    @Override
    public String getText() {
        return text;
    }

    @Override
    public URLReference getIcon() {
        return icon;
    }

    @Override
    public SkillFactType getType() {
        return type;
    }

    @Override
    public int getRequiredTrait() {
        return requiredTrait;
    }

    @Override
    public OptionalInt getOverrides() {
        return overrides;
    }
}
/**
 * Generates a random-noise PNG. Usage: width height [file], with optional
 * flags: -v/--verbose, --scale[-w|-h]=N (or -s...), and --light=/-l= followed
 * by a run of d/D (darker) or b/B (brighter) adjustment steps.
 * Output goes to the named file, or to stdout when the file is ":-" or absent.
 */
public static void main(String[] args) throws Exception {
    if (args.length < 2) {
        System.err.println("Ahem, I need at least 2 arguments please.");
        System.exit(1);
        return;
    }
    // Matches --scale=N / -s=N (both axes) or --scale-w=N / --scale-h=N.
    Pattern scalePat = Pattern.compile("-(?:-scale|s)(?:-(w|h))?=(\\d+)");
    // Double cast because Predicate.isEqual yields Predicate<Object>.
    @SuppressWarnings("unchecked")
    Predicate<String> isVerbose =
        (Predicate<String>) (Object) Predicate.isEqual("-v").or(Predicate.isEqual("--verbose"));
    boolean verbose = Stream.of(args).anyMatch(isVerbose);
    // Fold all scale flags into a [width, height] pair; in the sequential
    // reduce below, a later flag's value wins over an earlier one.
    OptionalInt[] packed =
        Stream.of(args)
            .map(scalePat::matcher)
            .filter(Matcher::matches)
            .map(
                matcher -> {
                    OptionalInt[] ints =
                        new OptionalInt[] {OptionalInt.empty(), OptionalInt.empty()};
                    OptionalInt scale = OptionalInt.of(Integer.parseInt(matcher.group(2)));
                    if (matcher.group(1) == null) {
                        ints[0] = ints[1] = scale;
                    } else {
                        if (matcher.group(1).equals("w")) {
                            ints[0] = scale;
                        } else {
                            ints[1] = scale;
                        }
                    }
                    return ints;
                })
            .reduce(
                new OptionalInt[] {OptionalInt.empty(), OptionalInt.empty()},
                (id, next) -> {
                    OptionalInt[] ints = new OptionalInt[2];
                    OptionalInt aID = id[0];
                    OptionalInt bID = id[1];
                    OptionalInt aNx = next[0];
                    OptionalInt bNx = next[1];
                    ints[0] = aNx.isPresent() ? aNx : aID;
                    ints[1] = bNx.isPresent() ? bNx : bID;
                    return ints;
                });
    int scaleWidth = packed[0].orElse(1);
    int scaleHeight = packed[1].orElse(1);
    // --light=/-l= takes a run of d/D (darker) and b/B (brighter) steps.
    Pattern lightPat = Pattern.compile("-(?:-light|l)=([dDbB]+)");
    List<Function<Color, Color>> lightChanges =
        Stream.of(args)
            .map(lightPat::matcher)
            .filter(Matcher::matches)
            .flatMap(
                m ->
                    m.group(1)
                        .chars()
                        .mapToObj(
                            c ->
                                c == 'd' || c == 'D'
                                    ? (Function<Color, Color>) Color::darker
                                    : (Function<Color, Color>) Color::brighter))
            .collect(Collectors.toList());
    // Strip every recognized flag; what remains is width, height, [file].
    args =
        Stream.of(args)
            .filter(isVerbose.or(scalePat.asPredicate()).or(lightPat.asPredicate()).negate())
            .toArray(String[]::new);
    Random r = new Random();
    int width = Integer.parseInt(args[0]);
    int height = Integer.parseInt(args[1]);
    String file = args.length > 2 ? args[2] : ":-"; // ":-" means stdout
    if (verbose) {
        System.err.println("Generating an image...");
    }
    byte[] pixels = new byte[width * height * 4];
    r.nextBytes(pixels);
    BufferedImage created = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
    for (int i = 0, index = 0; i < pixels.length; i += 4, index++) {
        int x = index % width;
        int y = index / width;
        // Alpha is forced opaque; pixels[i] is never read.
        // NOTE(review): the Color(int) constructor ignores the alpha byte, so
        // the 0xFF << 24 term has no effect — confirm whether it is intentional.
        Color color =
            new Color(
                ((/* 0x0F */ 0xFF) << 24)
                    | ((pixels[i + 1] & 0xFF) << 16)
                    | ((pixels[i + 2] & 0xFF) << 8)
                    | ((pixels[i + 3] & 0xFF)));
        for (Function<Color, Color> change : lightChanges) {
            color = change.apply(color);
        }
        created.setRGB(x, y, color.getRGB());
    }
    // Integer upscale via an affine transform onto a fresh image.
    int scaledWidth = width * scaleWidth;
    int scaledHeight = height * scaleHeight;
    BufferedImage tmp = new BufferedImage(scaledWidth, scaledHeight, BufferedImage.TYPE_INT_ARGB);
    Graphics2D g = tmp.createGraphics();
    AffineTransform at = AffineTransform.getScaleInstance(scaleWidth, scaleHeight);
    g.drawRenderedImage(created, at);
    created = tmp;
    if (verbose) {
        System.err.println("Writing to file...");
    }
    ImageIO.write(
        created, "PNG", file.equals(":-") ? System.out : Files.newOutputStream(Paths.get(file)));
    if (verbose) {
        System.err.println("Complete...");
    }
}
/* select orderkey, quantity, totalprice from lineitem join orders using (orderkey) */
@Override
protected List<Driver> createDrivers(TaskContext taskContext)
{
    // ---- build side: scan orders, optionally append a precomputed hash column ----
    ImmutableList.Builder<OperatorFactory> driversBuilder = ImmutableList.builder();
    driversBuilder.add(ordersTableScan);
    OperatorFactory source = ordersTableScan;
    Optional<Integer> hashChannel = Optional.empty();
    if (hashEnabled) {
        source = createHashProjectOperator(1, new PlanNodeId("test"), ImmutableList.of(BIGINT, DOUBLE));
        driversBuilder.add(source);
        hashChannel = Optional.of(2);
    }

    // hash build (join key is channel 0)
    HashBuilderOperatorFactory hashBuilder =
        new HashBuilderOperatorFactory(
            2,
            new PlanNodeId("test"),
            source.getTypes(),
            ImmutableMap.of(),
            Ints.asList(0),
            hashChannel,
            false,
            Optional.empty(),
            1_500_000,
            1);
    driversBuilder.add(hashBuilder);
    DriverFactory hashBuildDriverFactory =
        new DriverFactory(true, false, driversBuilder.build(), OptionalInt.empty());
    Driver hashBuildDriver =
        hashBuildDriverFactory.createDriver(
            taskContext.addPipelineContext(true, false).addDriverContext());
    hashBuildDriverFactory.close();

    // ---- probe side: scan lineitem, join against the built lookup source ----
    ImmutableList.Builder<OperatorFactory> joinDriversBuilder = ImmutableList.builder();
    joinDriversBuilder.add(lineItemTableScan);
    source = lineItemTableScan;
    hashChannel = Optional.empty();
    if (hashEnabled) {
        source = createHashProjectOperator(1, new PlanNodeId("test"), ImmutableList.of(BIGINT, BIGINT));
        joinDriversBuilder.add(source);
        hashChannel = Optional.of(2);
    }
    OperatorFactory joinOperator =
        LookupJoinOperators.innerJoin(
            2,
            new PlanNodeId("test"),
            hashBuilder.getLookupSourceFactory(),
            source.getTypes(),
            Ints.asList(0),
            hashChannel,
            false);
    joinDriversBuilder.add(joinOperator);
    // Benchmark sink: discard the join output.
    joinDriversBuilder.add(
        new NullOutputOperatorFactory(3, new PlanNodeId("test"), joinOperator.getTypes()));
    DriverFactory joinDriverFactory =
        new DriverFactory(true, true, joinDriversBuilder.build(), OptionalInt.empty());
    Driver joinDriver =
        joinDriverFactory.createDriver(
            taskContext.addPipelineContext(true, true).addDriverContext());
    joinDriverFactory.close();
    return ImmutableList.of(hashBuildDriver, joinDriver);
}
/**
 * Tests for CompactionSetCreator: verifies how shards are grouped into
 * organization (compaction) sets for plain, temporal, bucketed, and
 * bucketed-temporal tables.
 */
public class TestCompactionSetCreator {
    // Compaction thresholds shared by every test below.
    private static final long MAX_SHARD_ROWS = 100;
    private static final DataSize MAX_SHARD_SIZE = new DataSize(100, DataSize.Unit.BYTE);

    // Table fixtures differing in bucket count and temporal column.
    // NOTE(review): argument meanings inferred from usage — confirm against
    // the Table constructor.
    private static final Table tableInfo =
        new Table(1L, OptionalLong.empty(), OptionalInt.empty(), OptionalLong.empty());
    private static final Table temporalTableInfo =
        new Table(1L, OptionalLong.empty(), OptionalInt.empty(), OptionalLong.of(1));
    private static final Table bucketedTableInfo =
        new Table(1L, OptionalLong.empty(), OptionalInt.of(3), OptionalLong.empty());
    private static final Table bucketedTemporalTableInfo =
        new Table(1L, OptionalLong.empty(), OptionalInt.of(3), OptionalLong.of(1));

    private final CompactionSetCreator compactionSetCreator =
        new CompactionSetCreator(MAX_SHARD_SIZE, MAX_SHARD_ROWS);

    // Three small shards of a plain table collapse into a single set.
    @Test
    public void testNonTemporalOrganizationSetSimple()
            throws Exception
    {
        List<ShardIndexInfo> inputShards =
            ImmutableList.of(shardWithSize(10, 10), shardWithSize(10, 10), shardWithSize(10, 10));
        Set<OrganizationSet> compactionSets =
            compactionSetCreator.createCompactionSets(tableInfo, inputShards);
        assertEquals(compactionSets.size(), 1);
        assertEquals(getOnlyElement(compactionSets).getShards(), extractIndexes(inputShards, 0, 1, 2));
    }

    // The 120-byte shard exceeds MAX_SHARD_SIZE and must be excluded.
    @Test
    public void testNonTemporalSizeBasedOrganizationSet()
            throws Exception
    {
        List<ShardIndexInfo> inputShards = ImmutableList.of(
            shardWithSize(10, 70), shardWithSize(10, 20), shardWithSize(10, 30), shardWithSize(10, 120));
        Set<OrganizationSet> compactionSets =
            compactionSetCreator.createCompactionSets(tableInfo, inputShards);
        Set<UUID> actual = new HashSet<>();
        for (OrganizationSet set : compactionSets) {
            actual.addAll(set.getShards());
        }
        assertTrue(extractIndexes(inputShards, 0, 1, 2).containsAll(actual));
    }

    // The 100-row shard is at the MAX_SHARD_ROWS limit and must be excluded.
    @Test
    public void testNonTemporalRowCountBasedOrganizationSet()
            throws Exception
    {
        List<ShardIndexInfo> inputShards = ImmutableList.of(
            shardWithSize(50, 10), shardWithSize(100, 10), shardWithSize(20, 10), shardWithSize(30, 10));
        Set<OrganizationSet> compactionSets =
            compactionSetCreator.createCompactionSets(tableInfo, inputShards);
        Set<UUID> actual = new HashSet<>();
        for (OrganizationSet set : compactionSets) {
            actual.addAll(set.getShards());
        }
        assertTrue(extractIndexes(inputShards, 0, 2, 3).containsAll(actual));
    }

    // Shards confined to single days must never be compacted across days.
    @Test
    public void testTemporalCompactionNoCompactionAcrossDays()
            throws Exception
    {
        long day1 = Duration.ofDays(Duration.ofNanos(System.nanoTime()).toDays()).toMillis();
        long day2 = Duration.ofDays(Duration.ofMillis(day1).toDays() + 1).toMillis();
        long day3 = Duration.ofDays(Duration.ofMillis(day1).toDays() + 2).toMillis();
        List<ShardIndexInfo> inputShards = ImmutableList.of(
            shardWithTemporalRange(TIMESTAMP, day1, day1),
            shardWithTemporalRange(TIMESTAMP, day2, day2),
            shardWithTemporalRange(TIMESTAMP, day2, day2),
            shardWithTemporalRange(TIMESTAMP, day1, day1),
            shardWithTemporalRange(TIMESTAMP, day3, day3));
        Set<OrganizationSet> actual =
            compactionSetCreator.createCompactionSets(temporalTableInfo, inputShards);
        assertEquals(actual.size(), 2);
        Set<OrganizationSet> expected = ImmutableSet.of(
            new OrganizationSet(
                temporalTableInfo.getTableId(), extractIndexes(inputShards, 0, 3), OptionalInt.empty()),
            new OrganizationSet(
                temporalTableInfo.getTableId(), extractIndexes(inputShards, 1, 2), OptionalInt.empty()));
        assertEquals(actual, expected);
    }

    // Ranges spanning multiple days are grouped per the day markers below.
    @Test
    public void testTemporalCompactionSpanningDays()
            throws Exception
    {
        long day1 = Duration.ofDays(Duration.ofNanos(System.nanoTime()).toDays()).toMillis();
        long day2 = Duration.ofDays(Duration.ofMillis(day1).toDays() + 1).toMillis();
        long day3 = Duration.ofDays(Duration.ofMillis(day1).toDays() + 2).toMillis();
        long day4 = Duration.ofDays(Duration.ofMillis(day1).toDays() + 3).toMillis();
        List<ShardIndexInfo> inputShards = ImmutableList.of(
            shardWithTemporalRange(TIMESTAMP, day1, day3), // day2
            shardWithTemporalRange(TIMESTAMP, day2, day2), // day2
            shardWithTemporalRange(TIMESTAMP, day1, day1), // day1
            shardWithTemporalRange(TIMESTAMP, day1 + 100, day2 + 100), // day1
            shardWithTemporalRange(TIMESTAMP, day1 - 100, day2 - 100), // day1
            shardWithTemporalRange(TIMESTAMP, day2 - 100, day3 - 100), // day2
            shardWithTemporalRange(TIMESTAMP, day1, day4) // day2
        );
        long tableId = temporalTableInfo.getTableId();
        Set<OrganizationSet> compactionSets =
            compactionSetCreator.createCompactionSets(temporalTableInfo, inputShards);
        assertEquals(compactionSets.size(), 2);
        Set<OrganizationSet> expected = ImmutableSet.of(
            new OrganizationSet(tableId, extractIndexes(inputShards, 0, 1, 5, 6), OptionalInt.empty()),
            new OrganizationSet(tableId, extractIndexes(inputShards, 2, 3, 4), OptionalInt.empty()));
        assertEquals(compactionSets, expected);
    }

    // DATE temporal values are day numbers, not millis.
    @Test
    public void testTemporalCompactionDate()
            throws Exception
    {
        long day1 = Duration.ofNanos(System.nanoTime()).toDays();
        long day2 = day1 + 1;
        long day3 = day1 + 2;
        List<ShardIndexInfo> inputShards = ImmutableList.of(
            shardWithTemporalRange(DATE, day1, day1),
            shardWithTemporalRange(DATE, day2, day2),
            shardWithTemporalRange(DATE, day3, day3),
            shardWithTemporalRange(DATE, day1, day3),
            shardWithTemporalRange(DATE, day2, day3),
            shardWithTemporalRange(DATE, day1, day2));
        long tableId = temporalTableInfo.getTableId();
        Set<OrganizationSet> actual =
            compactionSetCreator.createCompactionSets(temporalTableInfo, inputShards);
        assertEquals(actual.size(), 2);
        Set<OrganizationSet> expected = ImmutableSet.of(
            new OrganizationSet(tableId, extractIndexes(inputShards, 0, 3, 5), OptionalInt.empty()),
            new OrganizationSet(tableId, extractIndexes(inputShards, 1, 4), OptionalInt.empty()));
        assertEquals(actual, expected);
    }

    // Bucketed tables compact per bucket number.
    @Test
    public void testBucketedTableCompaction()
            throws Exception
    {
        List<ShardIndexInfo> inputShards = ImmutableList.of(
            shardWithBucket(1),
            shardWithBucket(2),
            shardWithBucket(2),
            shardWithBucket(1),
            shardWithBucket(2),
            shardWithBucket(1));
        long tableId = bucketedTableInfo.getTableId();
        Set<OrganizationSet> actual =
            compactionSetCreator.createCompactionSets(bucketedTableInfo, inputShards);
        assertEquals(actual.size(), 2);
        Set<OrganizationSet> expected = ImmutableSet.of(
            new OrganizationSet(tableId, extractIndexes(inputShards, 0, 3, 5), OptionalInt.of(1)),
            new OrganizationSet(tableId, extractIndexes(inputShards, 1, 2, 4), OptionalInt.of(2)));
        assertEquals(actual, expected);
    }

    // Collects the shard UUIDs at the given positions.
    static Set<UUID> extractIndexes(List<ShardIndexInfo> inputShards, int... indexes) {
        ImmutableSet.Builder<UUID> builder = ImmutableSet.builder();
        for (int index : indexes) {
            builder.add(inputShards.get(index).getShardUuid());
        }
        return builder.build();
    }

    // Bucketed temporal tables group by bucket AND day; shards 4 and 5 are
    // on different days and are left out of the expected sets.
    @Test
    public void testBucketedTemporalTableCompaction()
            throws Exception
    {
        long day1 = 1;
        long day2 = 2;
        long day3 = 3;
        long day4 = 4;
        List<ShardIndexInfo> inputShards = ImmutableList.of(
            shardWithTemporalBucket(OptionalInt.of(1), DATE, day1, day1),
            shardWithTemporalBucket(OptionalInt.of(2), DATE, day2, day2),
            shardWithTemporalBucket(OptionalInt.of(1), DATE, day1, day1),
            shardWithTemporalBucket(OptionalInt.of(2), DATE, day2, day2),
            shardWithTemporalBucket(OptionalInt.of(1), DATE, day3, day3),
            shardWithTemporalBucket(OptionalInt.of(2), DATE, day4, day4));
        long tableId = bucketedTemporalTableInfo.getTableId();
        Set<OrganizationSet> actual =
            compactionSetCreator.createCompactionSets(bucketedTemporalTableInfo, inputShards);
        assertEquals(actual.size(), 2);
        Set<OrganizationSet> expected = ImmutableSet.of(
            new OrganizationSet(tableId, extractIndexes(inputShards, 0, 2), OptionalInt.of(1)),
            new OrganizationSet(tableId, extractIndexes(inputShards, 1, 3), OptionalInt.of(2)));
        assertEquals(actual, expected);
    }

    // Unbucketed shard in table 1 with the given row count and byte size.
    private static ShardIndexInfo shardWithSize(long rows, long size) {
        return new ShardIndexInfo(
            1, OptionalInt.empty(), UUID.randomUUID(), rows, size, Optional.empty(), Optional.empty());
    }

    // Temporal-only shard: no bucket assignment.
    private static ShardIndexInfo shardWithTemporalRange(Type type, Long start, Long end) {
        return shardWithTemporalBucket(OptionalInt.empty(), type, start, end);
    }

    // Bucketed shard without temporal metadata.
    private static ShardIndexInfo shardWithBucket(int bucketNumber) {
        return new ShardIndexInfo(
            1, OptionalInt.of(bucketNumber), UUID.randomUUID(), 1, 1, Optional.empty(), Optional.empty());
    }

    // Shard with both a (possibly absent) bucket and a temporal range;
    // DATE ranges are stored as ints, other types as longs.
    private static ShardIndexInfo shardWithTemporalBucket(
            OptionalInt bucketNumber, Type type, Long start, Long end) {
        if (type.equals(DATE)) {
            return new ShardIndexInfo(
                1,
                bucketNumber,
                UUID.randomUUID(),
                1,
                1,
                Optional.empty(),
                Optional.of(
                    ShardRange.of(new Tuple(type, start.intValue()), new Tuple(type, end.intValue()))));
        }
        return new ShardIndexInfo(
            1,
            bucketNumber,
            UUID.randomUUID(),
            1,
            1,
            Optional.empty(),
            Optional.of(ShardRange.of(new Tuple(type, start), new Tuple(type, end))));
    }
}
private static ShardIndexInfo shardWithTemporalRange(Type type, Long start, Long end)
{
    // A temporal-only shard carries no bucket assignment.
    OptionalInt noBucket = OptionalInt.empty();
    return shardWithTemporalBucket(noBucket, type, start, end);
}
private static ShardIndexInfo shardWithSize(long rows, long size)
{
    // Unbucketed shard in table 1 with the given row count and byte size;
    // no sort or temporal metadata.
    UUID shardUuid = UUID.randomUUID();
    return new ShardIndexInfo(
            1, OptionalInt.empty(), shardUuid, rows, size, Optional.empty(), Optional.empty());
}
public void test_of_dateAdjustment() {
  // A step created from an explicit date carries that date, no period index,
  // and the supplied value adjustment unchanged.
  ValueStep step = ValueStep.of(date(2014, 6, 30), DELTA_MINUS_2000);
  assertEquals(step.getDate(), Optional.of(date(2014, 6, 30)));
  assertEquals(step.getPeriodIndex(), OptionalInt.empty());
  assertEquals(step.getValue(), DELTA_MINUS_2000);
}
@Override
public OptionalInt fromJson(String jsonValue, Type type)
{
    // The JSON null sentinel maps to an empty OptionalInt; anything else is
    // parsed as a base-10 integer.
    if (NULL.equals(jsonValue)) {
        return OptionalInt.empty();
    }
    return OptionalInt.of(Integer.parseInt(jsonValue));
}
private static StoragePageSink createStoragePageSink(
        StorageManager manager, List<Long> columnIds, List<Type> columnTypes)
{
    // Unbucketed sink under the fixed test transaction id.
    return manager.createStoragePageSink(
            TRANSACTION_ID, OptionalInt.empty(), columnIds, columnTypes, false);
}
/** @param step The step being added to the end of the pipeline. */
public StepAddedEvent(Step step) {
  // Appended at the end, so no explicit index is recorded.
  this.index = OptionalInt.empty();
  this.step = checkNotNull(step, "Step can not be null");
}