/**
 * If the message produces a Gnutella message of over 65KB, it needs to be reduced in size by
 * splitting the message into a number of smaller messages. Whenever a message is split, any
 * resource ID list the message may have contained is moved into an entirely separate message.
 * The number of results stored in each message is determined dynamically from the size of the
 * original serialized message.
 *
 * @return A list of new, smaller messages (or a list containing only the original message if it
 *         was already small enough)
 */
public List<SearchResponseMessage> split() {
    List<SearchResponseMessage> returnList = new ArrayList<SearchResponseMessage>();

    int messageSize;
    try {
        messageSize = getSerializedBytes();
    } catch (IOException e) {
        // Just print the stack trace and return the original message if the size
        // cannot be determined (not sure how this would happen)
        e.printStackTrace();
        returnList.add(this);
        return returnList;
    }

    if (messageSize < MAX_MESSAGE_SIZE) {
        // Message is already small enough, just wrap it in a list
        returnList.add(this);
        return returnList;
    } else {
        // The message exceeds the maximum message size and must be split
        SearchResponseMessage splitMessage =
                new SearchResponseMessage(getId(), getCommunityId(), getUrlPrefix());
        returnList.add(splitMessage);

        // If a hosted resource list or trust metrics exist, move them into a separate message
        if ((hostedResIdList != null && !hostedResIdList.isEmpty())
                || !this.getMetricNames().isEmpty()) {
            if (hostedResIdList != null && !hostedResIdList.isEmpty()) {
                splitMessage.setHostedResIdList(hostedResIdList);
            }
            if (!this.getMetricNames().isEmpty()) {
                for (String metricName : this.getMetricNames()) {
                    splitMessage.addTrustMetric(metricName, this.getTrustMetric(metricName));
                }
            }
            splitMessage.setUrlPrefix(getUrlPrefix());

            // Generate a new message (the first to actually hold resource information)
            splitMessage = new SearchResponseMessage(getId(), getCommunityId(), getUrlPrefix());
            returnList.add(splitMessage);
        }

        // Add all SearchResponses to the newly generated "smaller" messages, starting a new
        // message whenever the current one approaches the maximum message size
        int totalResponseBytes = 0; // Bytes used in the current "smaller" message
        for (SearchResponse curResponse : results) {
            try {
                int curResponseSize = getSerializedResponseBytes(curResponse);
                if ((curResponseSize + totalResponseBytes) > (MAX_MESSAGE_SIZE - HEADER_PADDING_BYTES)
                        && totalResponseBytes > 0) {
                    // Generate a new response message whenever the maximum message size would be
                    // reached and the current message already contains at least one response.
                    // TODO: If a single response alone exceeds the maximum message size, it is
                    // added anyway and compression should bring it under the limit. Otherwise,
                    // nothing can really be done here short of dropping the response entirely
                    // (i.e. resources over 64KB would be unsearchable).
                    splitMessage = new SearchResponseMessage(getId(), getCommunityId(), getUrlPrefix());
                    splitMessage.setUrlPrefix(getUrlPrefix());
                    returnList.add(splitMessage);
                    totalResponseBytes = 0;
                }
                splitMessage.addResult(curResponse);
                totalResponseBytes += curResponseSize;
            } catch (IOException e) {
                // Just print the trace and skip the search response if its size cannot
                // be determined (not sure how this would happen)
                e.printStackTrace();
            }
        }
        return returnList;
    }
}
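// Illustrative sketch only, not part of the original class: a minimal check of split()'s
// contract, namely that each returned part should serialize below the Gnutella message ceiling
// apart from the single-oversized-response case noted in the TODO above. The helper name and
// its use of System.out are assumptions made for demonstration; it relies only on methods that
// already appear in this class (split() and getSerializedBytes()).
private static void demonstrateSplit(SearchResponseMessage message) {
    for (SearchResponseMessage part : message.split()) {
        try {
            // Each part would be forwarded as its own Gnutella message; here we only
            // report its serialized size so the splitting behaviour can be observed.
            System.out.println("Split part size: " + part.getSerializedBytes() + " bytes");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}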
/**
 * Parses a search response from the given XML fragment.
 *
 * @param xmlNode the XML containing the search response
 * @return the search response
 * @throws MalformedPeerMessageException on a badly formed or invalid message
 */
public static SearchResponseMessage parse(Node xmlNode) throws MalformedPeerMessageException {
    // Check the root node name first
    try {
        if (xmlNode.getNodeName().equals(GenericPeerMessage.X_UP2P_MESSAGE)) {
            Node currentNode = xmlNode.getFirstChild();
            while (currentNode != null && currentNode.getNodeType() != Node.ELEMENT_NODE) {
                currentNode = currentNode.getNextSibling();
            }
            Element current = (Element) currentNode;

            String communityId = current.getAttribute(X_COMMUNITY);
            String searchId = current.getAttribute(X_SEARCH_ID);
            String urlPrefix = current.getAttribute(X_URL_PREFIX);
            SearchResponseMessage response =
                    new SearchResponseMessage(searchId, communityId, urlPrefix);

            // Add a result for each searchResult child, and process the hosted resource ID
            // list and trust metrics if they exist
            NodeList resultList = current.getChildNodes();
            for (int i = 0; i < resultList.getLength(); i++) {
                // Check the node type to avoid mixed content
                if (resultList.item(i).getNodeType() == Node.ELEMENT_NODE
                        && resultList.item(i).getNodeName().equals(X_SEARCH_RESULT)) {
                    // A search result element has been found
                    Element result = (Element) resultList.item(i);
                    String resourceTitle = result.getAttribute(X_TITLE);
                    String resourceId = result.getAttribute(X_RESOURCE_ID);
                    String fileName = result.getAttribute(X_FILENAME);

                    // Construct the SearchResponse with what we have so far
                    // (the DOM is missing, and locations are not serialized)
                    SearchResponse newResponse = new SearchResponse(
                            resourceId, resourceTitle, communityId, fileName, null, false, searchId);

                    if (result.hasChildNodes()) {
                        // The result carries resource metadata. Create a new Document of the
                        // same implementation class so that its nodes can be imported.
                        Document metadata = TransformerHelper.newDocument();

                        // Import the metadata into the new document context
                        NodeList copyNodes = result.getChildNodes();
                        for (int k = 0; k < copyNodes.getLength(); k++) {
                            Node importedNode = metadata.importNode(copyNodes.item(k), true);
                            metadata.appendChild(importedNode);
                        }

                        // Add the DOM to the SearchResponse under construction
                        newResponse.addResourceDOM(metadata);
                    }
                    response.addResult(newResponse);

                } else if (resultList.item(i).getNodeType() == Node.ELEMENT_NODE
                        && resultList.item(i).getNodeName().equals(X_HOSTED_RES_ID_LIST)) {
                    // A hosted resource ID list element has been found; build the list of
                    // hosted resource IDs for this search response
                    List<String> hostedIds = new ArrayList<String>();
                    NodeList idNodes = resultList.item(i).getChildNodes();
                    for (int j = 0; j < idNodes.getLength(); j++) {
                        hostedIds.add(idNodes.item(j).getTextContent());
                    }
                    response.setHostedResIdList(hostedIds);

                } else if (resultList.item(i).getNodeType() == Node.ELEMENT_NODE
                        && resultList.item(i).getNodeName().equals(X_TRUST_METRIC)) {
                    // A trust metric element has been found
                    String metricName = ((Element) resultList.item(i)).getAttribute(X_ATTR_METRIC_NAME);
                    String metricValue = ((Element) resultList.item(i)).getAttribute(X_ATTR_METRIC_VALUE);
                    response.addTrustMetric(metricName, metricValue);
                }
            }
            return response;
        }
        throw new MalformedPeerMessageException(
                ERROR_MSG + " Root node name: " + xmlNode.getNodeName());
    } catch (Exception e) {
        e.printStackTrace();
        throw new MalformedPeerMessageException(ERROR_MSG);
    }
}
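// Illustrative sketch only, not part of the original class: shows one way parse() might be
// driven from a raw XML string. The helper name and the use of the standard JAXP
// DocumentBuilderFactory are assumptions for demonstration; the project may instead route
// incoming XML through its own TransformerHelper utilities.
private static SearchResponseMessage parseFromString(String xml)
        throws MalformedPeerMessageException {
    try {
        javax.xml.parsers.DocumentBuilder builder =
                javax.xml.parsers.DocumentBuilderFactory.newInstance().newDocumentBuilder();
        org.w3c.dom.Document doc =
                builder.parse(new org.xml.sax.InputSource(new java.io.StringReader(xml)));
        // The document element is expected to carry the GenericPeerMessage.X_UP2P_MESSAGE
        // node name that parse() checks for.
        return parse(doc.getDocumentElement());
    } catch (Exception e) {
        throw new MalformedPeerMessageException(ERROR_MSG);
    }
}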