    /**
     * Computes the checksum based on the algorithm stored in the
     * tokenHeader.
     *
     * @param data the application data
     * @param offset the offset where the data begins
     * @param len the length of the application data
     * @throws GSSException if an error occurs in the checksum calculation.
     */
    byte[] getChecksum(byte[] data, int offset, int len)
            throws GSSException {

        // debug("Will do getChecksum:\n");

        /*
         * For checksum calculation the token header bytes i.e., the first 16
         * bytes following the GSSHeader, are logically prepended to the
         * application data to bind the data to this particular token.
         *
         * Note: There is no such requirement wrt adding padding to the
         * application data for checksumming, although the cryptographic
         * algorithm used might itself apply some padding.
         */

        byte[] tokenHeaderBytes = tokenHeader.getBytes();

        // check confidentiality
        int conf_flag = tokenHeaderBytes[TOKEN_FLAG_POS] &
                FLAG_WRAP_CONFIDENTIAL;

        // clear EC and RRC in token header for checksum calculation
        if ((conf_flag == 0) && (tokenId == WRAP_ID_v2)) {
            tokenHeaderBytes[4] = 0;
            tokenHeaderBytes[5] = 0;
            tokenHeaderBytes[6] = 0;
            tokenHeaderBytes[7] = 0;
        }
        return cipherHelper.calculateChecksum(tokenHeaderBytes, data,
                offset, len, key_usage);
    }
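    // A minimal verification sketch (not part of this class; the surrounding
    // call is an assumption) showing how a checksum produced by getChecksum()
    // could be compared against the checksum carried in a received token. The
    // field and method names mirror those used in this file; a constant-time
    // comparison is used to avoid leaking how many bytes matched.
    //
    //   byte[] expected = getChecksum(data, offset, len);
    //   if (!java.security.MessageDigest.isEqual(expected, checksum)) {
    //       throw new GSSException(GSSException.BAD_MIC, -1,
    //               getTokenName(tokenId) + ": Corrupt checksum");
    //   }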
    /**
     * Constructs a MessageToken from an InputStream. Bytes will be read on
     * demand and the thread might block if there are not enough bytes to
     * complete the token. Please note there is no accurate way to find out
     * the size of a token, but we try our best to make sure there are
     * enough bytes to construct one.
     *
     * @param tokenId the token id that should be contained in this token
     * as it is read.
     * @param context the Kerberos context associated with this token
     * @param is the InputStream from which to read
     * @param prop the MessageProp structure in which the properties of the
     * token should be stored.
     * @throws GSSException if there is a problem reading from the
     * InputStream or parsing the token
     */
    MessageToken_v2(int tokenId, Krb5Context context, InputStream is,
                    MessageProp prop) throws GSSException {
        init(tokenId, context);
        try {
            if (!confState) {
                prop.setPrivacy(false);
            }
            tokenHeader = new MessageTokenHeader(is, prop, tokenId);

            // set key_usage
            if (tokenId == Krb5Token.WRAP_ID_v2) {
                key_usage = (!initiator ? KG_USAGE_INITIATOR_SEAL
                                        : KG_USAGE_ACCEPTOR_SEAL);
            } else if (tokenId == Krb5Token.MIC_ID_v2) {
                key_usage = (!initiator ? KG_USAGE_INITIATOR_SIGN
                                        : KG_USAGE_ACCEPTOR_SIGN);
            }

            int minSize = 0;    // minimal size for token data
            if (tokenId == Krb5Token.WRAP_ID_v2 && prop.getPrivacy()) {
                minSize = CONFOUNDER_SIZE +
                        TOKEN_HEADER_SIZE + cipherHelper.getChecksumLength();
            } else {
                minSize = cipherHelper.getChecksumLength();
            }

            // Read token data
            if (tokenId == Krb5Token.MIC_ID_v2) {
                // The only case we can precisely predict the token data length
                tokenDataLen = minSize;
                tokenData = new byte[minSize];
                readFully(is, tokenData);
            } else {
                tokenDataLen = is.available();
                if (tokenDataLen >= minSize) {  // read in one shot
                    tokenData = new byte[tokenDataLen];
                    readFully(is, tokenData);
                } else {
                    byte[] tmp = new byte[minSize];
                    readFully(is, tmp);

                    // Hope while blocked in the read above, more data would
                    // come and is.available() below contains the whole token.
                    int more = is.available();
                    tokenDataLen = minSize + more;
                    tokenData = Arrays.copyOf(tmp, tokenDataLen);
                    readFully(is, tokenData, minSize, more);
                }
            }

            if (tokenId == Krb5Token.WRAP_ID_v2) {
                rotate();
            }

            if (tokenId == Krb5Token.MIC_ID_v2 ||
                    (tokenId == Krb5Token.WRAP_ID_v2 && !prop.getPrivacy())) {

                // Read checksum
                int chkLen = cipherHelper.getChecksumLength();
                checksum = new byte[chkLen];
                System.arraycopy(tokenData, tokenDataLen - chkLen,
                        checksum, 0, chkLen);

                // validate EC for Wrap tokens without confidentiality
                if (tokenId == Krb5Token.WRAP_ID_v2 && !prop.getPrivacy()) {
                    if (chkLen != ec) {
                        throw new GSSException(GSSException.DEFECTIVE_TOKEN, -1,
                                getTokenName(tokenId) + ":" + "EC incorrect!");
                    }
                }
            }
        } catch (IOException e) {
            throw new GSSException(GSSException.DEFECTIVE_TOKEN, -1,
                    getTokenName(tokenId) + ":" + e.getMessage());
        }
    }
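    // A rough usage sketch of how this stream-based constructor is typically
    // reached. The subclass constructor and accessor shown below are
    // assumptions used only to illustrate the call order (parse the token
    // from the stream, then recover and verify the application data); they
    // are not asserted to be the exact API of this package.
    //
    //   MessageProp prop = new MessageProp(0, false);
    //   WrapToken_v2 token = new WrapToken_v2(context, is, prop); // delegates to this ctor
    //   byte[] appData = token.getData();                         // payload after verification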