/**
 * Packs a Pdu into the ByteBuffer.
 *
 * @throws java.nio.BufferOverflowException if buff is too small
 * @throws java.nio.ReadOnlyBufferException if buff is read only
 * @see java.nio.ByteBuffer
 * @param buff The ByteBuffer at the position to begin writing
 * @since ??
 */
public void marshal(java.nio.ByteBuffer buff) {
    super.marshal(buff);
    minefieldID.marshal(buff);
    requestingEntityID.marshal(buff);
    buff.putShort((short) minefieldSequenceNumbeer);
    buff.put((byte) requestID);
    buff.put((byte) pduSequenceNumber);
    buff.put((byte) numberOfPdus);
    buff.put((byte) mineLocation.size());
    buff.put((byte) sensorTypes.size());
    buff.put((byte) pad2);
    buff.putInt((int) dataFilter);
    mineType.marshal(buff);

    for (int idx = 0; idx < sensorTypes.size(); idx++) {
        TwoByteChunk aTwoByteChunk = (TwoByteChunk) sensorTypes.get(idx);
        aTwoByteChunk.marshal(buff);
    } // end of list marshalling

    buff.put((byte) pad3);

    for (int idx = 0; idx < mineLocation.size(); idx++) {
        Vector3Float aVector3Float = (Vector3Float) mineLocation.get(idx);
        aVector3Float.marshal(buff);
    } // end of list marshalling
} // end of marshal method
/**
 * Unpacks a Pdu from the underlying data.
 *
 * @throws java.nio.BufferUnderflowException if buff is too small
 * @see java.nio.ByteBuffer
 * @param buff The ByteBuffer at the position to begin reading
 * @since ??
 */
public void unmarshal(java.nio.ByteBuffer buff) {
    super.unmarshal(buff);
    minefieldID.unmarshal(buff);
    requestingEntityID.unmarshal(buff);
    minefieldSequenceNumbeer = (int) (buff.getShort() & 0xFFFF);
    requestID = (short) (buff.get() & 0xFF);
    pduSequenceNumber = (short) (buff.get() & 0xFF);
    numberOfPdus = (short) (buff.get() & 0xFF);
    numberOfMinesInThisPdu = (short) (buff.get() & 0xFF);
    numberOfSensorTypes = (short) (buff.get() & 0xFF);
    pad2 = (short) (buff.get() & 0xFF);
    dataFilter = buff.getInt();
    mineType.unmarshal(buff);

    for (int idx = 0; idx < numberOfSensorTypes; idx++) {
        TwoByteChunk anX = new TwoByteChunk();
        anX.unmarshal(buff);
        sensorTypes.add(anX);
    }

    pad3 = (short) (buff.get() & 0xFF);

    for (int idx = 0; idx < numberOfMinesInThisPdu; idx++) {
        Vector3Float anX = new Vector3Float();
        anX.unmarshal(buff);
        mineLocation.add(anX);
    }
} // end of unmarshal method
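The two methods above form the usual pair: marshal writes fields forward from the buffer's current position, and unmarshal reads them back in the same order. Below is a minimal round-trip sketch, assuming the enclosing class is a PDU type named MinefieldDataPdu with a no-arg constructor (the class name is not shown in the snippet, so it is an assumption), and using a generously sized buffer rather than an exact marshalled length.

import java.nio.ByteBuffer;

public class MarshalRoundTripSketch {
    public static void main(String[] args) {
        MinefieldDataPdu outgoing = new MinefieldDataPdu(); // assumed class name

        // 1024 bytes is a guess large enough for an empty PDU; a real caller
        // would size the buffer from the PDU's marshalled length.
        ByteBuffer buff = ByteBuffer.allocate(1024);

        outgoing.marshal(buff); // writes forward from position 0
        buff.flip();            // limit = bytes written, position = 0

        MinefieldDataPdu incoming = new MinefieldDataPdu();
        incoming.unmarshal(buff); // reads the same bytes back in the same order
    }
}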
/**
 * Unpacks a Pdu from the underlying data.
 *
 * @throws java.nio.BufferUnderflowException if buff is too small
 * @see java.nio.ByteBuffer
 * @param buff The ByteBuffer at the position to begin reading
 * @since ??
 */
public void unmarshal(java.nio.ByteBuffer buff) {
    recordType = (short) (buff.get() & 0xFF);
    changeIndicator = (short) (buff.get() & 0xFF);
    associationStatus = (short) (buff.get() & 0xFF);
    associationType = (short) (buff.get() & 0xFF);
    entityID.unmarshal(buff);
    ownStationLocation = (int) (buff.getShort() & 0xFFFF);
    physicalConnectionType = (short) (buff.get() & 0xFF);
    groupMemberType = (short) (buff.get() & 0xFF);
    groupNumber = (int) (buff.getShort() & 0xFFFF);
} // end of unmarshal method
/**
 * Packs a Pdu into the ByteBuffer.
 *
 * @throws java.nio.BufferOverflowException if buff is too small
 * @throws java.nio.ReadOnlyBufferException if buff is read only
 * @see java.nio.ByteBuffer
 * @param buff The ByteBuffer at the position to begin writing
 * @since ??
 */
public void marshal(java.nio.ByteBuffer buff) {
    buff.put((byte) recordType);
    buff.put((byte) changeIndicator);
    buff.put((byte) associationStatus);
    buff.put((byte) associationType);
    entityID.marshal(buff);
    buff.putShort((short) ownStationLocation);
    buff.put((byte) physicalConnectionType);
    buff.put((byte) groupMemberType);
    buff.putShort((short) groupNumber);
} // end of marshal method
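The "& 0xFF" and "& 0xFFFF" masks in the unmarshal method above are what keep the round trip lossless: Java's byte and short are signed, so values above 127 or 32767 come back negative unless they are masked and widened into the larger field type. A self-contained illustration of the idiom:

import java.nio.ByteBuffer;

public class UnsignedMaskSketch {
    public static void main(String[] args) {
        ByteBuffer buff = ByteBuffer.allocate(3);
        buff.put((byte) 200);         // an unsigned 8-bit value > 127
        buff.putShort((short) 40000); // an unsigned 16-bit value > 32767
        buff.flip();

        short u8 = (short) (buff.get() & 0xFF); // 200, not -56
        int u16 = buff.getShort() & 0xFFFF;     // 40000, not -25536
        System.out.println(u8 + " " + u16);     // prints "200 40000"
    }
}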
/**
 * Packs a Pdu into the ByteBuffer.
 *
 * @throws java.nio.BufferOverflowException if buff is too small
 * @throws java.nio.ReadOnlyBufferException if buff is read only
 * @see java.nio.ByteBuffer
 * @param buff The ByteBuffer at the position to begin writing
 * @since ??
 */
public void marshal(java.nio.ByteBuffer buff) {
    super.marshal(buff);
    minefieldID.marshal(buff);
    requestingEntityID.marshal(buff);
    buff.put((byte) requestID);
    buff.put((byte) requestedPerimeterPoints.size());
    buff.put((byte) pad2);
    buff.put((byte) sensorTypes.size());
    buff.putInt((int) dataFilter);
    requestedMineType.marshal(buff);

    for (int idx = 0; idx < requestedPerimeterPoints.size(); idx++) {
        Point aPoint = (Point) requestedPerimeterPoints.get(idx);
        aPoint.marshal(buff);
    } // end of list marshalling

    for (int idx = 0; idx < sensorTypes.size(); idx++) {
        TwoByteChunk aTwoByteChunk = (TwoByteChunk) sensorTypes.get(idx);
        aTwoByteChunk.marshal(buff);
    } // end of list marshalling
} // end of marshal method
/**
 * Unpacks a Pdu from the underlying data.
 *
 * @throws java.nio.BufferUnderflowException if buff is too small
 * @see java.nio.ByteBuffer
 * @param buff The ByteBuffer at the position to begin reading
 * @since ??
 */
public void unmarshal(java.nio.ByteBuffer buff) {
    super.unmarshal(buff);
    minefieldID.unmarshal(buff);
    requestingEntityID.unmarshal(buff);
    requestID = (short) (buff.get() & 0xFF);
    numberOfPerimeterPoints = (short) (buff.get() & 0xFF);
    pad2 = (short) (buff.get() & 0xFF);
    numberOfSensorTypes = (short) (buff.get() & 0xFF);
    dataFilter = buff.getInt();
    requestedMineType.unmarshal(buff);

    for (int idx = 0; idx < numberOfPerimeterPoints; idx++) {
        Point anX = new Point();
        anX.unmarshal(buff);
        requestedPerimeterPoints.add(anX);
    }

    for (int idx = 0; idx < numberOfSensorTypes; idx++) {
        TwoByteChunk anX = new TwoByteChunk();
        anX.unmarshal(buff);
        sensorTypes.add(anX);
    }
} // end of unmarshal method
/**
 * Packs a Pdu into the ByteBuffer.
 *
 * @throws java.nio.BufferOverflowException if buff is too small
 * @throws java.nio.ReadOnlyBufferException if buff is read only
 * @see java.nio.ByteBuffer
 * @param buff The ByteBuffer at the position to begin writing
 * @since ??
 */
public void marshal(java.nio.ByteBuffer buff) {
    super.marshal(buff);
    entityId.marshal(buff);
    buff.putShort((short) communicationsDeviceID);
    buff.putShort((short) encodingScheme);
    buff.putShort((short) tdlType);
    buff.putInt((int) sampleRate);
    buff.putShort((short) data.size());
    buff.putShort((short) samples);

    for (int idx = 0; idx < data.size(); idx++) {
        OneByteChunk aOneByteChunk = (OneByteChunk) data.get(idx);
        aOneByteChunk.marshal(buff);
    } // end of list marshalling
} // end of marshal method
/**
 * Unpacks a Pdu from the underlying data.
 *
 * @throws java.nio.BufferUnderflowException if buff is too small
 * @see java.nio.ByteBuffer
 * @param buff The ByteBuffer at the position to begin reading
 * @since ??
 */
public void unmarshal(java.nio.ByteBuffer buff) {
    super.unmarshal(buff);
    entityId.unmarshal(buff);
    communicationsDeviceID = (int) (buff.getShort() & 0xFFFF);
    encodingScheme = (int) (buff.getShort() & 0xFFFF);
    tdlType = (int) (buff.getShort() & 0xFFFF);
    sampleRate = buff.getInt();
    dataLength = (int) (buff.getShort() & 0xFFFF);
    samples = (int) (buff.getShort() & 0xFFFF);

    for (int idx = 0; idx < dataLength; idx++) {
        OneByteChunk anX = new OneByteChunk();
        anX.unmarshal(buff);
        data.add(anX);
    }
} // end of unmarshal method
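The pair above shows the variable-length pattern used throughout these snippets: the marshal side writes the live list's size as the count field, and the unmarshal side reads the count first and loops that many times while repopulating the list. A stripped-down, self-contained version of the same idiom (the Item type here is hypothetical, standing in for OneByteChunk or TwoByteChunk):

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

public class CountPrefixedListSketch {
    // Hypothetical two-byte record standing in for OneByteChunk/TwoByteChunk.
    static final class Item {
        short value;
        void marshal(ByteBuffer buff)   { buff.putShort(value); }
        void unmarshal(ByteBuffer buff) { value = buff.getShort(); }
    }

    public static void main(String[] args) {
        List<Item> outgoing = new ArrayList<>();
        outgoing.add(new Item());
        outgoing.add(new Item());

        ByteBuffer buff = ByteBuffer.allocate(64);
        buff.putShort((short) outgoing.size());   // count comes from the live list
        for (Item item : outgoing) item.marshal(buff);
        buff.flip();

        int count = buff.getShort() & 0xFFFF;     // read the count back first
        List<Item> incoming = new ArrayList<>();
        for (int i = 0; i < count; i++) {
            Item item = new Item();
            item.unmarshal(buff);
            incoming.add(item);
        }
        System.out.println(incoming.size());      // prints "2"
    }
}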
/**
 * Packs a Pdu into the ByteBuffer.
 *
 * @throws java.nio.BufferOverflowException if buff is too small
 * @throws java.nio.ReadOnlyBufferException if buff is read only
 * @see java.nio.ByteBuffer
 * @param buff The ByteBuffer at the position to begin writing
 * @since ??
 */
public void marshal(java.nio.ByteBuffer buff) {
    buff.putFloat((float) x);
    buff.putFloat((float) y);
    buff.putFloat((float) z);
} // end of marshal method
/**
 * Unpacks a Pdu from the underlying data.
 *
 * @throws java.nio.BufferUnderflowException if buff is too small
 * @see java.nio.ByteBuffer
 * @param buff The ByteBuffer at the position to begin reading
 * @since ??
 */
public void unmarshal(java.nio.ByteBuffer buff) {
    x = buff.getFloat();
    y = buff.getFloat();
    z = buff.getFloat();
} // end of unmarshal method
/**
 * Unpacks a Pdu from the underlying data.
 *
 * @throws java.nio.BufferUnderflowException if buff is too small
 * @see java.nio.ByteBuffer
 * @param buff The ByteBuffer at the position to begin reading
 * @since ??
 */
public void unmarshal(java.nio.ByteBuffer buff) {
    for (int idx = 0; idx < otherParameters.length; idx++) {
        otherParameters[idx] = buff.get();
    } // end of array unmarshaling
} // end of unmarshal method
/**
 * Packs a Pdu into the ByteBuffer.
 *
 * @throws java.nio.BufferOverflowException if buff is too small
 * @throws java.nio.ReadOnlyBufferException if buff is read only
 * @see java.nio.ByteBuffer
 * @param buff The ByteBuffer at the position to begin writing
 * @since ??
 */
public void marshal(java.nio.ByteBuffer buff) {
    for (int idx = 0; idx < otherParameters.length; idx++) {
        buff.put((byte) otherParameters[idx]);
    } // end of array marshaling
} // end of marshal method
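Unlike the count-prefixed lists, the pair above serializes a fixed-length array, so no length precedes the data: both sides rely on otherParameters.length being agreed in advance. A tiny self-contained sketch of the same shape (the array length of 8 is arbitrary, chosen only for illustration):

import java.nio.ByteBuffer;

public class FixedArraySketch {
    public static void main(String[] args) {
        byte[] otherParameters = new byte[8]; // length fixed by the type, not by a count field

        ByteBuffer buff = ByteBuffer.allocate(otherParameters.length);
        for (int idx = 0; idx < otherParameters.length; idx++) {
            buff.put(otherParameters[idx]); // marshal: raw bytes only
        }
        buff.flip();

        byte[] readBack = new byte[otherParameters.length];
        for (int idx = 0; idx < readBack.length; idx++) {
            readBack[idx] = buff.get();     // unmarshal: same fixed count
        }
    }
}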
/**
 * This digester is based on some existing public document (not sure which). There are some
 * changes though. It is not clear anymore why we used that document as a base, as this is purely
 * internal.
 *
 * <p>The bottom line is that the digest should change whenever the infoset of the source XML
 * document changes.
 */
public static class DigestContentHandler implements XMLReceiver {

    private static final int ELEMENT_CODE = Node.ELEMENT_NODE;
    private static final int ATTRIBUTE_CODE = Node.ATTRIBUTE_NODE;
    private static final int TEXT_CODE = Node.TEXT_NODE;
    private static final int PROCESSING_INSTRUCTION_CODE = Node.PROCESSING_INSTRUCTION_NODE;
    private static final int NAMESPACE_CODE = 0XAA01; // some code that is none of the above
    private static final int COMMENT_CODE = 0XAA02; // some code that is none of the above

    /**
     * 4/6/2005 d : Previously we were using String.getBytes("UnicodeBigUnmarked"). (Believe the
     * code was copied from RFC 2803.) That call first tries to get a java.nio.Charset with the
     * given name; if this fails it uses a sun.io.CharToByteConverter. Now in the case of
     * "UnicodeBigUnmarked" there is no such Charset, so a CharToByteConverter, utf-16be, is used.
     * Unfortunately this negative lookup is expensive. (Costing us a full second in the
     * 50thread/512MB test.) The solution, of course, is just to get the appropriate Charset and
     * hold on to it.
     */
    private static final Charset utf16BECharset = Charset.forName("UTF-16BE");

    /** Encoder has state and therefore cannot be shared across threads. */
    private final CharsetEncoder charEncoder = utf16BECharset.newEncoder();

    private java.nio.CharBuffer charBuff = java.nio.CharBuffer.allocate(64);
    private java.nio.ByteBuffer byteBuff = java.nio.ByteBuffer.allocate(128);

    private final MessageDigest digest = SecureUtils.defaultMessageDigest();

    private void ensureCharBuffRemaining(final int size) {
        if (charBuff.remaining() < size) {
            final int cpcty = (charBuff.capacity() + size) * 2;
            final java.nio.CharBuffer newChBuf = java.nio.CharBuffer.allocate(cpcty);
            newChBuf.put(charBuff);
            charBuff = newChBuf;
        }
    }

    private void updateWithCharBuf() {
        final int reqSize = (int) charEncoder.maxBytesPerChar() * charBuff.position();
        if (byteBuff.capacity() < reqSize) {
            byteBuff = java.nio.ByteBuffer.allocate(2 * reqSize);
        }

        // Make ready for read
        charBuff.flip();

        final CoderResult cr = charEncoder.encode(charBuff, byteBuff, true);
        try {
            if (cr.isError()) cr.throwException();

            // Make ready for read
            byteBuff.flip();

            final byte[] byts = byteBuff.array();
            final int len = byteBuff.remaining();
            final int strt = byteBuff.arrayOffset();
            digest.update(byts, strt, len);
        } catch (final CharacterCodingException e) {
            throw new OXFException(e);
        } catch (java.nio.BufferOverflowException e) {
            throw new OXFException(e);
        } catch (java.nio.BufferUnderflowException e) {
            throw new OXFException(e);
        } finally {
            // Make ready for write
            charBuff.clear();
            byteBuff.clear();
        }
    }

    private void updateWith(final String s) {
        addToCharBuff(s);
        updateWithCharBuf();
    }

    private void updateWith(final char[] chArr, final int ofst, final int len) {
        ensureCharBuffRemaining(len);
        charBuff.put(chArr, ofst, len);
        updateWithCharBuf();
    }

    private void addToCharBuff(final char c) {
        ensureCharBuffRemaining(1);
        charBuff.put(c);
    }

    private void addToCharBuff(final String s) {
        final int size = s.length();
        ensureCharBuffRemaining(size);
        charBuff.put(s);
    }

    public byte[] getResult() {
        return digest.digest();
    }

    public void setDocumentLocator(Locator locator) {}

    public void startDocument() throws SAXException {
        charBuff.clear();
        byteBuff.clear();
        charEncoder.reset();
    }

    public void endDocument() throws SAXException {}

    public void startPrefixMapping(String prefix, String uri) throws SAXException {
        digest.update((byte) ((NAMESPACE_CODE >> 24) & 0xff));
        digest.update((byte) ((NAMESPACE_CODE >> 16) & 0xff));
        digest.update((byte) ((NAMESPACE_CODE >> 8) & 0xff));
        digest.update((byte) (NAMESPACE_CODE & 0xff));
        updateWith(prefix);
        digest.update((byte) 0);
        digest.update((byte) 0);
        updateWith(uri);
        digest.update((byte) 0);
        digest.update((byte) 0);
    }

    public void endPrefixMapping(String prefix) throws SAXException {}

    public void startElement(String namespaceURI, String localName, String qName, Attributes atts)
            throws SAXException {
        digest.update((byte) ((ELEMENT_CODE >> 24) & 0xff));
        digest.update((byte) ((ELEMENT_CODE >> 16) & 0xff));
        digest.update((byte) ((ELEMENT_CODE >> 8) & 0xff));
        digest.update((byte) (ELEMENT_CODE & 0xff));
        addToCharBuff('{');
        addToCharBuff(namespaceURI);
        addToCharBuff('}');
        addToCharBuff(localName);
        updateWithCharBuf();
        digest.update((byte) 0);
        digest.update((byte) 0);

        int attCount = atts.getLength();
        digest.update((byte) ((attCount >> 24) & 0xff));
        digest.update((byte) ((attCount >> 16) & 0xff));
        digest.update((byte) ((attCount >> 8) & 0xff));
        digest.update((byte) (attCount & 0xff));
        for (int i = 0; i < attCount; i++) {
            digest.update((byte) ((ATTRIBUTE_CODE >> 24) & 0xff));
            digest.update((byte) ((ATTRIBUTE_CODE >> 16) & 0xff));
            digest.update((byte) ((ATTRIBUTE_CODE >> 8) & 0xff));
            digest.update((byte) (ATTRIBUTE_CODE & 0xff));

            final String attURI = atts.getURI(i);
            final String attNam = atts.getLocalName(i);

            addToCharBuff('{');
            addToCharBuff(attURI);
            addToCharBuff('}');
            addToCharBuff(attNam);
            updateWithCharBuf();
            digest.update((byte) 0);
            digest.update((byte) 0);

            final String val = atts.getValue(i);
            updateWith(val);
        }
    }

    public void endElement(String namespaceURI, String localName, String qName)
            throws SAXException {}

    public void characters(char ch[], int start, int length) throws SAXException {
        digest.update((byte) ((TEXT_CODE >> 24) & 0xff));
        digest.update((byte) ((TEXT_CODE >> 16) & 0xff));
        digest.update((byte) ((TEXT_CODE >> 8) & 0xff));
        digest.update((byte) (TEXT_CODE & 0xff));
        updateWith(ch, start, length);
        digest.update((byte) 0);
        digest.update((byte) 0);
    }

    public void ignorableWhitespace(char ch[], int start, int length) throws SAXException {}

    public void processingInstruction(String target, String data) throws SAXException {
        digest.update((byte) ((PROCESSING_INSTRUCTION_CODE >> 24) & 0xff));
        digest.update((byte) ((PROCESSING_INSTRUCTION_CODE >> 16) & 0xff));
        digest.update((byte) ((PROCESSING_INSTRUCTION_CODE >> 8) & 0xff));
        digest.update((byte) (PROCESSING_INSTRUCTION_CODE & 0xff));
        updateWith(target);
        digest.update((byte) 0);
        digest.update((byte) 0);
        updateWith(data);
        digest.update((byte) 0);
        digest.update((byte) 0);
    }

    public void skippedEntity(String name) throws SAXException {}

    public void startDTD(String name, String publicId, String systemId) throws SAXException {}

    public void endDTD() throws SAXException {}

    public void startEntity(String name) throws SAXException {}

    public void endEntity(String name) throws SAXException {}

    public void startCDATA() throws SAXException {}

    public void endCDATA() throws SAXException {}

    public void comment(char[] ch, int start, int length) throws SAXException {
        // We do consider comments significant for the purpose of digesting. But should this be an
        // option?
        digest.update((byte) ((COMMENT_CODE >> 24) & 0xff));
        digest.update((byte) ((COMMENT_CODE >> 16) & 0xff));
        digest.update((byte) ((COMMENT_CODE >> 8) & 0xff));
        digest.update((byte) (COMMENT_CODE & 0xff));
        updateWith(ch, start, length);
        digest.update((byte) 0);
        digest.update((byte) 0);
    }
}
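A hedged usage sketch for the digester above: the class consumes SAX events (XMLReceiver extends org.xml.sax.ContentHandler in Orbeon, so a SAX parser can drive it directly), but the events can also be pushed by hand, as below. The enclosing class of DigestContentHandler is not shown in the snippet, so its qualification is omitted; the element name and text are made up for illustration.

import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;

public class DigestSketch {
    static byte[] digestTinyDocument() throws SAXException {
        DigestContentHandler digester = new DigestContentHandler();

        digester.startDocument();
        digester.startElement("urn:example", "doc", "doc", new AttributesImpl());
        char[] text = "hello".toCharArray();
        digester.characters(text, 0, text.length);
        digester.endElement("urn:example", "doc", "doc");
        digester.endDocument();

        return digester.getResult(); // same event stream in, same digest out
    }
}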