private void validate(final ByteBuf buffer) {
    // Run the full-message check, then verify that the payload minus the
    // final two bytes (presumably a trailer — TODO confirm) is printable ASCII.
    final int start = buffer.readerIndex();
    final int readable = buffer.readableBytes();

    // Exclude the last two bytes from the ASCII check; never go negative.
    final int asciiLength = Math.max(readable - 2, 0);

    checkMessage(buffer.slice());
    checkPrintableAscii(buffer.slice(start, asciiLength));
  }
  /**
   * Decodes one FML proxy packet: the first byte of the payload selects the registered packet
   * class, the remainder is the packet body. The packet is dispatched to the client- or
   * server-side handler depending on the effective side, then forwarded downstream.
   *
   * @throws NullPointerException if no packet class is registered for the discriminator
   */
  @Override
  protected void decode(
      ChannelHandlerContext channelHandlerContext, FMLProxyPacket fmlProxyPacket, List<Object> out)
      throws Exception {
    ByteBuf payload = fmlProxyPacket.payload();
    byte discriminator = payload.readByte();
    Class<? extends AbstractPacketOld> clazz = this.packets.get(discriminator);
    if (clazz == null) {
      throw new NullPointerException("No packet registered for discriminator: " + discriminator);
    }

    // FIX: Class.newInstance() is deprecated and silently rethrows checked
    // constructor exceptions; go through the declared no-arg constructor.
    // Any reflective failure still propagates via the `throws Exception` clause.
    AbstractPacketOld packet = clazz.getDeclaredConstructor().newInstance();
    // Hand the handler a slice so it cannot disturb this payload's indices.
    packet.decodeInto(channelHandlerContext, payload.slice());

    EntityPlayer player;
    switch (FMLCommonHandler.instance().getEffectiveSide()) {
      case CLIENT:
        player = PacketHandlerOld.getClientPlayer();
        packet.handleClientSide(player);
        break;

      case SERVER:
        INetHandler netHandler =
            channelHandlerContext.channel().attr(NetworkRegistry.NET_HANDLER).get();
        player = ((NetHandlerPlayServer) netHandler).playerEntity;
        packet.handleServerSide(player);
        break;

      default:
        // No handler for other sides; the packet is still forwarded below.
        break;
    }
    out.add(packet);
  }
  /**
   * Parses an ERO Label subobject body.
   *
   * <p>Wire layout handled here: a bit-array of {@code FLAGS_SIZE} bits (bit {@code U_FLAG_OFFSET}
   * is the uni-directional flag), one unsigned byte of C-TYPE, then the label body, which is
   * dispatched to {@code registry.parseLabel}.
   *
   * @param buffer buffer positioned at the subobject body; must be non-null and readable
   * @param loose value of the subobject's loose flag
   * @return the parsed subobject
   * @throws PCEPDeserializerException if the buffer is shorter than {@code HEADER_LENGTH} or the
   *     C-TYPE is not known to the registry
   */
  @Override
  public Subobject parseSubobject(final ByteBuf buffer, final boolean loose)
      throws PCEPDeserializerException {
    Preconditions.checkArgument(
        buffer != null && buffer.isReadable(),
        "Array of bytes is mandatory. Can't be null or empty.");
    if (buffer.readableBytes() < HEADER_LENGTH) {
      // FIX: the message claimed "> HEADER_LENGTH", but the check accepts exactly
      // HEADER_LENGTH bytes, so ">=" is the actual requirement.
      throw new PCEPDeserializerException(
          "Wrong length of array of bytes. Passed: "
              + buffer.readableBytes()
              + "; Expected: >="
              + HEADER_LENGTH
              + ".");
    }
    final BitArray reserved = BitArray.valueOf(buffer, FLAGS_SIZE);
    final short cType = buffer.readUnsignedByte();

    // The registry gets a slice, so it cannot disturb this buffer's indices.
    final LabelType labelType = this.registry.parseLabel(cType, buffer.slice());
    if (labelType == null) {
      throw new PCEPDeserializerException(
          "Unknown C-TYPE for ero label subobject. Passed: " + cType);
    }
    final LabelBuilder builder = new LabelBuilder();
    builder.setUniDirectional(reserved.get(U_FLAG_OFFSET));
    builder.setLabelType(labelType);
    return new SubobjectBuilder()
        .setLoose(loose)
        .setSubobjectType(new LabelCaseBuilder().setLabel(builder.build()).build())
        .build();
  }
  /** Checks every readable byte of {@code buffer} via the per-byte overload. */
  private void checkPrintableAscii(final ByteBuf buffer) {
    // Work on a slice so the caller's reader index is never moved.
    final ByteBuf view = buffer.slice();
    final int end = view.writerIndex();

    for (int pos = view.readerIndex(); pos < end; pos++) {
      checkPrintableAscii(view.getByte(pos));
    }
  }
  /**
   * Load a record batch from a single buffer.
   *
   * <p>Vectors whose field matches one already in the container are reused and reloaded with this
   * batch's data; unmatched fields get a freshly allocated vector and mark the schema as changed.
   * Vectors for fields absent from the new definition are closed.
   *
   * @param def The definition for the record batch.
   * @param buf The buffer that holds the data associated with the record batch
   * @return Whether or not the schema changed since the previous load.
   * @throws SchemaChangeException
   */
  public boolean load(RecordBatchDef def, ByteBuf buf) throws SchemaChangeException {
    this.valueCount = def.getRecordCount();
    boolean schemaChanged = schema == null;

    // Index the current vectors by field so matching ones can be reused.
    Map<MaterializedField, ValueVector> oldFields = Maps.newHashMap();
    for (VectorWrapper<?> w : container) {
      ValueVector v = w.getValueVector();
      oldFields.put(v.getField(), v);
    }

    VectorContainer newVectors = new VectorContainer();

    List<FieldMetadata> fields = def.getFieldList();

    int bufOffset = 0;
    for (FieldMetadata fmd : fields) {
      FieldDef fieldDef = fmd.getDef();
      ValueVector v = oldFields.remove(fieldDef);
      if (v == null) {
        // No matching vector: the schema changed; allocate a fresh one.
        schemaChanged = true;
        MaterializedField m = new MaterializedField(fieldDef);
        v = TypeHelper.getNewVector(m, allocator);
      }

      // FIX: the reused-vector path previously added the vector to the OLD
      // container (discarded below when `container = newVectors`), never
      // reloaded it with this batch's data, and did not advance bufOffset —
      // corrupting the buffer offsets of every following field. Both paths
      // now load the data and register the vector in newVectors.
      if (fmd.getValueCount() == 0) {
        v.clear();
      } else {
        v.load(fmd, buf.slice(bufOffset, fmd.getBufferLength()));
      }
      bufOffset += fmd.getBufferLength();
      newVectors.add(v);
    }

    // Any leftover vectors belong to fields that disappeared: schema changed.
    if (!oldFields.isEmpty()) {
      schemaChanged = true;
      for (ValueVector v : oldFields.values()) {
        v.close();
      }
    }

    // rebuild the schema.
    SchemaBuilder b = BatchSchema.newBuilder();
    for (VectorWrapper<?> v : newVectors) {
      b.addField(v.getField());
    }
    b.setSelectionVectorMode(BatchSchema.SelectionVectorMode.NONE);
    this.schema = b.build();
    container = newVectors;
    return schemaChanged;
  }
 protected ByteBuf extractFrame(ChannelHandlerContext ctx, ByteBuf buffer, int index, int length) {
   // Slice FIRST: under ReplayingDecoder, slicing a region that has not fully
   // arrived throws the replay signal here, before anything is allocated.
   // Allocating with ctx.alloc().buffer() before the data is complete would
   // leak that buffer when the decode is replayed. A successful slice proves
   // that enough data has arrived, so the copy below is safe.
   final ByteBuf region = buffer.slice(index, length);
   final ByteBuf frameCopy = ctx.alloc().buffer(length);
   frameCopy.writeBytes(region, 0, length);
   return frameCopy;
 }
 /**
  * Reads an encapsulated packet: a class name (decoded as a primitive) followed by the packet
  * body, which is handed to the new packet's own {@code readPacket}.
  *
  * <p>SECURITY NOTE(review): the class name comes straight off the wire, so a peer can request
  * instantiation of any loadable class whose cast to {@code Packet} succeeds. Consider validating
  * {@code clazzName} against a whitelist of known packet classes.
  *
  * @param buffer buffer positioned at an encapsulated packet
  * @return the decoded packet
  * @throws IOException if no class name is present or any decoding step fails
  */
 public static Packet deencapsulatePacket(ByteBuf buffer) throws IOException {
   String clazzName = (String) Packet.decodePrimitive(buffer);
   if (clazzName == null) {
     throw new IOException("No class specified!");
   }
   try {
     @SuppressWarnings("unchecked")
     Class<? extends Packet> clazz = (Class<? extends Packet>) Class.forName(clazzName);
     // FIX: Class.newInstance() is deprecated and silently rethrows checked
     // constructor exceptions; use the declared no-arg constructor instead.
     // Any reflective failure is wrapped as IOException below, as before.
     Packet packet = clazz.getDeclaredConstructor().newInstance();
     // Slice so the packet cannot move this buffer's reader index.
     packet.readPacket(buffer.slice());
     return packet;
   } catch (IOException ex) {
     throw ex;
   } catch (Exception ex) {
     throw new IOException("Decoding exception", ex);
   }
 }
Example #8
0
 /**
  * Returns the next chunk of up to {@code length} bytes of {@code byteBuf} as a zero-copy slice,
  * advancing {@code chunkPosition}. Returns {@code EMPTY_BUFFER} (and resets the cursor) when the
  * buffer is absent, {@code length} is 0, or no bytes remain.
  */
 @Override
 public ByteBuf getChunk(int length) throws IOException {
   if (byteBuf == null || length == 0 || byteBuf.readableBytes() == 0) {
     chunkPosition = 0;
     return EMPTY_BUFFER;
   }
   final int remaining = byteBuf.readableBytes() - chunkPosition;
   if (remaining == 0) {
     chunkPosition = 0;
     return EMPTY_BUFFER;
   }
   // Never hand out more than what is left.
   final int take = Math.min(length, remaining);
   final ByteBuf chunk = byteBuf.slice(chunkPosition, take);
   chunkPosition += take;
   return chunk;
 }
  /**
   * Processes the next queued file request and writes the response.
   *
   * <p>Pops one request from {@code fileQueue}; if more remain, this context is re-registered with
   * the service, otherwise it is marked idle. Type 255 / file 255 is answered with the cache's
   * checksum data; other requests are served from the cache store, with the last two bytes
   * stripped for non-255 types (presumably a version trailer — TODO confirm). A CRC/version
   * mismatch is answered with an empty buffer.
   *
   * <p>NOTE(review): this method throws nothing — cache-read {@code IOException}s are only
   * printed, and the (empty) response is still written afterwards.
   */
  public void processFileQueue() {
    FileRequestEvent request;
    // Pop under the queue lock so idle/registration state stays consistent.
    synchronized (fileQueue) {
      request = fileQueue.pop();
      if (fileQueue.isEmpty()) {
        idle = true;
      } else {
        service.addAudioContext(this);
        idle = false;
      }
    }
    if (request != null) {
      int type = request.getType();
      int file = request.getFile();
      int crc = request.getCrc();
      int version = request.getVersion();
      HttpVersion http = request.getHttp();

      ByteBuf buf = Unpooled.buffer();
      if (type == 255 && file == 255) {
        // Special request: the cache's checksum data.
        buf = Unpooled.wrappedBuffer(server.getCache().getChecksum());
      } else {
        // Stale request (CRC or version mismatch): answer with the empty buffer.
        if (server.getCache().getReferenceTable(type).getEntry(file).getCrc() != crc
            || server.getCache().getReferenceTable(type).getEntry(file).getVersion() != version) {
          writeResponse(http, buf);
          return;
        }

        try {
          buf = Unpooled.wrappedBuffer(server.getCache().getStore().read(type, file));
          // Non-255 types: drop the trailing two bytes before sending.
          if (type != 255) buf = buf.slice(0, buf.readableBytes() - 2);
        } catch (IOException e) {
          // NOTE(review): error is only printed; an empty response is still written.
          e.printStackTrace();
        }
      }

      writeResponse(http, buf);
    }
  }
  /**
   * Decodes one (possibly compressed) packet. A leading VarInt carries the uncompressed size; a
   * size of 0 marks an uncompressed packet that is passed through as-is.
   */
  @Override
  protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
    int size = DefinedPacket.readVarInt(in);
    if (size == 0) {
      // Uncompressed: forward the remaining bytes. retain() keeps the slice
      // alive after `in` is released by the pipeline (NOTE(review): relies on
      // the decoder framework releasing `in`); skipBytes marks it consumed.
      out.add(in.slice().retain());
      in.skipBytes(in.readableBytes());
    } else {
      ByteBuf decompressed = ctx.alloc().directBuffer();

      try {
        zlib.process(in, decompressed);
        Preconditions.checkState(
            decompressed.readableBytes() == size, "Decompressed packet size mismatch");

        out.add(decompressed);
        // Ownership handed to `out`; null out so finally does not release it.
        decompressed = null;
      } finally {
        // Released only on the failure path (exception before the hand-off).
        if (decompressed != null) {
          decompressed.release();
        }
      }
    }
  }
  /**
   * State-machine decoder for SPDY frames. Each call consumes what it can from {@code buffer} for
   * the current {@code state} and emits decoded frames into {@code out}. The fields it references
   * ({@code state}, {@code length}, {@code flags}, {@code streamId}, {@code version},
   * {@code spdySettingsFrame}, {@code spdyHeadersFrame}, ...) are decoder instance state carried
   * across invocations, so exact statement order matters.
   */
  @Override
  protected void decode(ChannelHandlerContext ctx, ByteBuf buffer, List<Object> out)
      throws Exception {
    switch (state) {
      case READ_COMMON_HEADER:
        state = readCommonHeader(buffer);
        if (state == State.FRAME_ERROR) {
          if (version != spdyVersion) {
            fireProtocolException(ctx, "Unsupported version: " + version);
          } else {
            fireInvalidFrameException(ctx);
          }
        }

        // FrameDecoders must consume data when producing frames
        // All length 0 frames must be generated now
        if (length == 0) {
          if (state == State.READ_DATA_FRAME) {
            SpdyDataFrame spdyDataFrame = new DefaultSpdyDataFrame(streamId);
            spdyDataFrame.setLast((flags & SPDY_DATA_FLAG_FIN) != 0);
            state = State.READ_COMMON_HEADER;
            out.add(spdyDataFrame);
            return;
          }
          // There are no length 0 control frames
          state = State.READ_COMMON_HEADER;
        }

        return;

      case READ_CONTROL_FRAME:
        try {
          // A null frame means not enough data arrived yet; keep state and wait.
          Object frame = readControlFrame(buffer);
          if (frame != null) {
            state = State.READ_COMMON_HEADER;
            out.add(frame);
          }
          return;
        } catch (IllegalArgumentException e) {
          state = State.FRAME_ERROR;
          fireInvalidFrameException(ctx);
        }
        return;

      case READ_SETTINGS_FRAME:
        if (spdySettingsFrame == null) {
          // Validate frame length against number of entries
          if (buffer.readableBytes() < 4) {
            return;
          }
          int numEntries = getUnsignedInt(buffer, buffer.readerIndex());
          buffer.skipBytes(4);
          length -= 4;

          // Each ID/Value entry is 8 bytes
          if ((length & 0x07) != 0 || length >> 3 != numEntries) {
            state = State.FRAME_ERROR;
            fireInvalidFrameException(ctx);
            return;
          }

          spdySettingsFrame = new DefaultSpdySettingsFrame();

          boolean clear = (flags & SPDY_SETTINGS_CLEAR) != 0;
          spdySettingsFrame.setClearPreviouslyPersistedSettings(clear);
        }

        // Consume as many complete 8-byte entries as are buffered and still owed.
        int readableEntries = Math.min(buffer.readableBytes() >> 3, length >> 3);
        for (int i = 0; i < readableEntries; i++) {
          byte ID_flags = buffer.getByte(buffer.readerIndex());
          int ID = getUnsignedMedium(buffer, buffer.readerIndex() + 1);
          int value = getSignedInt(buffer, buffer.readerIndex() + 4);
          buffer.skipBytes(8);

          // First occurrence of an ID wins; duplicates are ignored.
          if (!spdySettingsFrame.isSet(ID)) {
            boolean persistVal = (ID_flags & SPDY_SETTINGS_PERSIST_VALUE) != 0;
            boolean persisted = (ID_flags & SPDY_SETTINGS_PERSISTED) != 0;
            spdySettingsFrame.setValue(ID, value, persistVal, persisted);
          }
        }

        length -= 8 * readableEntries;
        if (length == 0) {
          // Frame complete: emit it and hand ownership to the pipeline.
          state = State.READ_COMMON_HEADER;
          Object frame = spdySettingsFrame;
          spdySettingsFrame = null;
          out.add(frame);
          return;
        }
        return;

      case READ_HEADER_BLOCK_FRAME:
        try {
          spdyHeadersFrame = readHeaderBlockFrame(buffer);
          if (spdyHeadersFrame != null) {
            if (length == 0) {
              // Header frame with no header block: emit immediately.
              state = State.READ_COMMON_HEADER;
              Object frame = spdyHeadersFrame;
              spdyHeadersFrame = null;
              out.add(frame);
              return;
            }
            state = State.READ_HEADER_BLOCK;
          }
          return;
        } catch (IllegalArgumentException e) {
          state = State.FRAME_ERROR;
          fireInvalidFrameException(ctx);
          return;
        }

      case READ_HEADER_BLOCK:
        // Feed the decompressor only the bytes that belong to this frame.
        int compressedBytes = Math.min(buffer.readableBytes(), length);
        ByteBuf compressed = buffer.slice(buffer.readerIndex(), compressedBytes);

        try {
          headerBlockDecoder.decode(compressed, spdyHeadersFrame);
        } catch (Exception e) {
          state = State.FRAME_ERROR;
          spdyHeadersFrame = null;
          ctx.fireExceptionCaught(e);
          return;
        }

        // The decoder may not consume the whole slice; skip only what it read.
        int readBytes = compressedBytes - compressed.readableBytes();
        buffer.skipBytes(readBytes);
        length -= readBytes;

        if (spdyHeadersFrame != null
            && (spdyHeadersFrame.isInvalid() || spdyHeadersFrame.isTruncated())) {

          Object frame = spdyHeadersFrame;
          spdyHeadersFrame = null;
          if (length == 0) {
            headerBlockDecoder.reset();
            state = State.READ_COMMON_HEADER;
          }
          out.add(frame);
          return;
        }

        if (length == 0) {
          Object frame = spdyHeadersFrame;
          spdyHeadersFrame = null;
          headerBlockDecoder.reset();
          state = State.READ_COMMON_HEADER;
          if (frame != null) {
            out.add(frame);
          }
        }
        return;

      case READ_DATA_FRAME:
        if (streamId == 0) {
          state = State.FRAME_ERROR;
          fireProtocolException(ctx, "Received invalid data frame");
          return;
        }

        // Generate data frames that do not exceed maxChunkSize
        int dataLength = Math.min(maxChunkSize, length);

        // Wait until entire frame is readable
        if (buffer.readableBytes() < dataLength) {
          return;
        }

        ByteBuf data = ctx.alloc().buffer(dataLength);
        data.writeBytes(buffer, dataLength);
        SpdyDataFrame spdyDataFrame = new DefaultSpdyDataFrame(streamId, data);
        length -= dataLength;

        if (length == 0) {
          spdyDataFrame.setLast((flags & SPDY_DATA_FLAG_FIN) != 0);
          state = State.READ_COMMON_HEADER;
        }
        out.add(spdyDataFrame);
        return;

      case DISCARD_FRAME:
        // Throw away the remainder of the current frame's payload.
        int numBytes = Math.min(buffer.readableBytes(), length);
        buffer.skipBytes(numBytes);
        length -= numBytes;
        if (length == 0) {
          state = State.READ_COMMON_HEADER;
        }
        return;

      case FRAME_ERROR:
        // After a fatal framing error all further input is discarded.
        buffer.skipBytes(buffer.readableBytes());
        return;

      default:
        throw new Error("Shouldn't reach here.");
    }
  }
Example #12
0
 /** Delegates {@code slice(index, length)} to the wrapped buffer. */
 @Override
 public ByteBuf slice(int index, int length) {
   final ByteBuf delegate = this.buf;
   return delegate.slice(index, length);
 }
Example #13
0
 /** Delegates the readable-region {@code slice()} to the wrapped buffer. */
 @Override
 public ByteBuf slice() {
   final ByteBuf delegate = this.buf;
   return delegate.slice();
 }
 /**
  * Delegates {@code slice(index, length)} to the wrapped buffer {@code a}.
  *
  * <p>FIX: renamed decompiler-style parameters ({@code var1}/{@code var2}) to match
  * {@code ByteBuf.slice}'s documented {@code (index, length)} contract; parameter names are not
  * part of the Java call interface, so callers are unaffected.
  */
 @Override
 public ByteBuf slice(int index, int length) {
   return a.slice(index, length);
 }
Example #15
0
 /**
  * Returns the payload of {@code e} with the leading {@code id} prefix stripped, or {@code null}
  * when {@code checkMessage} rejects it.
  */
 public ByteBuf getMessage(ByteBuf e) {
   if (!checkMessage(e)) {
     return null;
   }
   final int prefixLength = id.length;
   return e.slice(prefixLength, e.readableBytes() - prefixLength);
 }
Example #16
0
 /** Extracts a frame as a zero-copy view into the cumulation buffer (no retain, no copy). */
 @Override
 protected ByteBuf extractFrame(ChannelHandlerContext ctx, ByteBuf buffer, int index, int length) {
   final ByteBuf frame = buffer.slice(index, length);
   return frame;
 }
 /** Reads the slot id (first int), then stores the remaining bytes as the raw payload. */
 @Override
 public void decodeInto(ChannelHandlerContext ctx, ByteBuf buffer) {
   final int slot = buffer.readInt();
   slotId = slot;
   data_$eq(buffer.slice());
 }
 /** Writes the readable region of {@code heapBuffer} into {@code cb} without moving its indices. */
 @Override
 public void marshallMessage(ByteBuf cb) {
   final ByteBuf readableView = heapBuffer.slice();
   cb.writeBytes(readableView);
 }