@Override
    public T next() throws IOException {
      // Read the next (key fields, pointer) tuple from the sorted key input.
      Tuple value = tupleInput.next(this.value);
      if (value != null) {
        this.value = value;
        // The last field of the key tuple holds the byte offset of the full record.
        long pointer = value.<Long>getField(pointerPos);

        // Seek the records spill file to that offset and deserialize the record.
        recordsInputs.seek(pointer);
        return serializer.deserialize(recordsInputs);
      } else {
        return null;
      }
    }
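
For context, a hedged usage sketch: this next() follows Flink's MutableObjectIterator contract, where null signals exhaustion rather than a missing value. A small drain helper, assuming only that contract (the class and method names below are invented for illustration):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.flink.util.MutableObjectIterator;

final class IteratorDraining {
  // Hypothetical helper: collect every remaining record from an iterator that
  // follows the next()-returns-null-at-end contract shown above.
  static <T> List<T> drainToList(MutableObjectIterator<T> it) throws IOException {
    List<T> result = new ArrayList<>();
    T record;
    while ((record = it.next()) != null) { // null marks exhaustion
      result.add(record);
    }
    return result;
  }
}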
Example No. 2
  /**
   * Transforms a Storm tuple into a Flink tuple of type {@code OUT} and emits this tuple via {@link
   * #doEmit(Object)} to the specified output stream.
   *
   * @param streamId The output stream id.
   * @param tuple The Storm tuple to be emitted.
   * @return the return value of {@link #doEmit(Object)}
   */
  @SuppressWarnings("unchecked")
  protected final List<Integer> transformAndEmit(final String streamId, final List<Object> tuple) {
    List<Integer> taskIds;

    int numAtt = this.numberOfAttributes.get(streamId);
    int taskIdIdx = numAtt;
    if (this.taskId >= 0 && numAtt < 0) {
      numAtt = 1;
      taskIdIdx = 0;
    }
    if (numAtt >= 0) {
      assert (tuple.size() == numAtt);
      Tuple out = this.outputTuple.get(streamId);
      for (int i = 0; i < numAtt; ++i) {
        out.setField(tuple.get(i), i);
      }
      if (this.taskId >= 0) {
        out.setField(this.taskId, taskIdIdx);
      }
      if (this.split) {
        this.splitTuple.streamId = streamId;
        this.splitTuple.value = out;

        taskIds = doEmit((OUT) this.splitTuple);
      } else {
        taskIds = doEmit((OUT) out);
      }

    } else {
      assert (tuple.size() == 1);
      if (this.split) {
        this.splitTuple.streamId = streamId;
        this.splitTuple.value = tuple.get(0);

        taskIds = doEmit((OUT) this.splitTuple);
      } else {
        taskIds = doEmit((OUT) tuple.get(0));
      }
    }
    this.tupleEmitted = true;

    return taskIds;
  }
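
For context, a hedged sketch of how a concrete collector typically reaches this method; the subclass fragment below is invented for illustration, not taken from the source:

// Hypothetical subclass fragment: a Storm-style emit(...) that delegates the
// tuple conversion and emission to transformAndEmit(...).
public List<Integer> emit(final String streamId, final List<Object> tuple) {
  return this.transformAndEmit(streamId, tuple);
}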
Example No. 3
  /**
   * Instantiates a new {@link AbstractStormCollector} that emits Flink tuples via {@link
   * #doEmit(Object)} to the specified output stream. If the number of attributes is negative, any output type is supported (i.e.,
   * raw type). If the number of attributes is between 0 and 25, the output type is {@link Tuple0}
   * to {@link Tuple25}, respectively.
   *
   * @param numberOfAttributes The number of attributes of the emitted tuples per output stream.
   * @param taskId The ID of the producer task (negative value for unknown).
   * @throws UnsupportedOperationException if the specified number of attributes is greater than
   *     25, or if task ID transmission is enabled for a raw stream
   */
  AbstractStormCollector(final HashMap<String, Integer> numberOfAttributes, final int taskId)
      throws UnsupportedOperationException {
    assert (numberOfAttributes != null);

    this.numberOfAttributes = numberOfAttributes;
    this.split = this.numberOfAttributes.size() > 1;
    this.taskId = taskId;

    for (Entry<String, Integer> outputStream : numberOfAttributes.entrySet()) {
      int numAtt = outputStream.getValue();

      if (this.taskId >= 0) {
        if (numAtt < 0) {
          throw new UnsupportedOperationException(
              "Task ID transmission not supported for raw streams: " + outputStream.getKey());
        }
        ++numAtt;
      }

      if (numAtt > 25) {
        if (this.taskId >= 0) {
          throw new UnsupportedOperationException(
              "Flink cannot handle more than 25 attributes, but "
                  + numAtt
                  + " ("
                  + (numAtt - 1)
                  + " plus 1 for the producer task ID) are declared for stream '"
                  + outputStream.getKey()
                  + "' by the given bolt.");
        } else {
          throw new UnsupportedOperationException(
              "Flink cannot handle more than 25 attributes, but "
                  + numAtt
                  + " are declared for stream '"
                  + outputStream.getKey()
                  + "' by the given bolt.");
        }
      }
      } else if (numAtt >= 0) {
        try {
          this.outputTuple.put(
              outputStream.getKey(),
              org.apache.flink.api.java.tuple.Tuple.getTupleClass(numAtt).newInstance());
        } catch (final InstantiationException | IllegalAccessException e) {
          throw new RuntimeException(e);
        }
      }
    }
  }
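
A minimal construction sketch for this class; the stream names, attribute counts, and the concrete subclass MyCollector are assumptions for illustration:

import java.util.HashMap;

// Hypothetical setup: one stream emitting Tuple3 values and one raw stream
// (negative attribute count). taskId is -1 (unknown), since task ID
// transmission cannot be combined with raw streams (see the checks above).
HashMap<String, Integer> attributesPerStream = new HashMap<>();
attributesPerStream.put("default", 3);    // emitted as Tuple3
attributesPerStream.put("rawStream", -1); // raw type: values emitted as-is
AbstractStormCollector<?> collector = new MyCollector(attributesPerStream, -1);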
  @SuppressWarnings("unchecked")
  public TupleTypeInfo(TypeInformation<?>... types) {
    this((Class<T>) Tuple.getTupleClass(types.length), types);
  }
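
This varargs constructor derives the concrete Tuple class from the number of field types. A small usage sketch with Flink's built-in basic type infos:

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

// Two field types -> Tuple.getTupleClass(2) -> Tuple2<String, Integer>.
TupleTypeInfo<Tuple2<String, Integer>> info =
    new TupleTypeInfo<>(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);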
  @SuppressWarnings("unchecked")
  public long addRecord(T record) throws IOException {

    if (recordsOutFile == null) {

      if (closed) {
        throw new IllegalStateException("The large record handler has been closed.");
      }
      if (recordsReader != null) {
        throw new IllegalStateException("The handler has already switched to sorting.");
      }

      LOG.debug("Initializing the large record spilling...");

      // initialize the utilities
      {
        final TypeComparator<?>[] keyComps = comparator.getFlatComparators();
        numKeyFields = keyComps.length;
        Object[] keyHolder = new Object[numKeyFields];

        comparator.extractKeys(record, keyHolder, 0);

        TypeSerializer<?>[] keySers = new TypeSerializer<?>[numKeyFields];
        TypeSerializer<?>[] tupleSers = new TypeSerializer<?>[numKeyFields + 1];

        int[] keyPos = new int[numKeyFields];

        for (int i = 0; i < numKeyFields; i++) {
          keyPos[i] = i;
          keySers[i] = createSerializer(keyHolder[i], i);
          tupleSers[i] = keySers[i];
        }
        // add the long serializer for the offset
        tupleSers[numKeyFields] = LongSerializer.INSTANCE;

        keySerializer =
            new TupleSerializer<Tuple>(
                (Class<Tuple>) Tuple.getTupleClass(numKeyFields + 1), tupleSers);
        keyComparator = new TupleComparator<Tuple>(keyPos, keyComps, keySers);

        keySerializerFactory =
            new RuntimeSerializerFactory<Tuple>(keySerializer, keySerializer.getTupleClass());

        keyTuple = keySerializer.createInstance();
      }

      // initialize the spilling: cap the key file's memory at
      // MAX_SEGMENTS_FOR_KEY_SPILLING segments when memory is plentiful, otherwise
      // keep at least MIN_SEGMENTS_FOR_KEY_SPILLING; the rest goes to the records file
      final int totalNumSegments = memory.size();
      final int segmentsForKeys =
          (totalNumSegments >= 2 * MAX_SEGMENTS_FOR_KEY_SPILLING)
              ? MAX_SEGMENTS_FOR_KEY_SPILLING
              : Math.max(
                  MIN_SEGMENTS_FOR_KEY_SPILLING, totalNumSegments - MAX_SEGMENTS_FOR_KEY_SPILLING);

      List<MemorySegment> recordsMemory = new ArrayList<MemorySegment>();
      List<MemorySegment> keysMemory = new ArrayList<MemorySegment>();

      for (int i = 0; i < segmentsForKeys; i++) {
        keysMemory.add(memory.get(i));
      }
      for (int i = segmentsForKeys; i < totalNumSegments; i++) {
        recordsMemory.add(memory.get(i));
      }

      recordsChannel = ioManager.createChannel();
      keysChannel = ioManager.createChannel();

      recordsOutFile =
          new FileChannelOutputView(
              ioManager.createBlockChannelWriter(recordsChannel),
              memManager,
              recordsMemory,
              memManager.getPageSize());

      keysOutFile =
          new FileChannelOutputView(
              ioManager.createBlockChannelWriter(keysChannel),
              memManager,
              keysMemory,
              memManager.getPageSize());
    }

    final long offset = recordsOutFile.getWriteOffset();
    if (offset < 0) {
      throw new RuntimeException("Negative write offset for spilled record: " + offset);
    }

    // Extract the key fields and append the record's byte offset as the last field.
    Object[] keyHolder = new Object[numKeyFields];

    comparator.extractKeys(record, keyHolder, 0);
    for (int i = 0; i < numKeyFields; i++) {
      keyTuple.setField(keyHolder[i], i);
    }
    keyTuple.setField(offset, numKeyFields);

    // Spill the compact key tuple and the full record to their respective files.
    keySerializer.serialize(keyTuple, keysOutFile);
    serializer.serialize(record, recordsOutFile);

    recordCounter++;

    return offset;
  }
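
To make the spilled layout concrete: each call writes the full record to the records file and a compact key tuple of (key fields..., byte offset) to the keys file. A hedged illustration for two key fields (all values invented):

import org.apache.flink.api.java.tuple.Tuple3;

// Hypothetical key tuple for numKeyFields == 2 with (String, Integer) keys.
// Sorting these small tuples by their key fields and then seeking the records
// file to the trailing offset (as next() does at the top of this page) yields
// the large records in sorted order without holding them in memory.
Tuple3<String, Integer, Long> spilledKey = new Tuple3<>("someKey", 42, 8192L);
long recordOffset = spilledKey.f2; // byte position in the records spill file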