/**
 * Deserializes this record's fields from the stream, in the fixed order:
 * five longs (lastActive, inBytes, outBytes, total, rmt_ip), two ints
 * (lcl_port, rmt_port), then one unsigned byte per tfPercent slot holding a
 * percentage 0-100 that is rescaled to [0.0, 1.0].
 * NOTE(review): presumably mirrors a matching write method elsewhere in this
 * class — confirm the field order against it before changing anything here.
 *
 * @param din source stream, positioned at the start of one serialized record
 * @throws IOException if the stream ends early or a read fails
 */
public void readIn(DataInputStream din) throws IOException {
    lastActive = din.readLong();
    inBytes = din.readLong();
    outBytes = din.readLong();
    total = din.readLong();
    rmt_ip = din.readLong();
    lcl_port = din.readInt();
    rmt_port = din.readInt();
    for (int i = 0; i < tfPercent.length; ++i) {
        // readUnsignedByte() is the stdlib idiom for the original
        // (int) (readByte() & 0xFF) widening; same bytes, same values.
        int pct = din.readUnsignedByte();
        tfPercent[i] = pct / 100.0;
    }
}
// 读取保存的下载信息(文件指针位置) private void read_nPos() { try { DataInputStream input = new DataInputStream(new FileInputStream(tmpFile)); int nCount = input.readInt(); nStartPos = new long[nCount]; nEndPos = new long[nCount]; for (int i = 0; i < nStartPos.length; i++) { nStartPos[i] = input.readLong(); nEndPos[i] = input.readLong(); } input.close(); } catch (IOException e) { e.printStackTrace(); } catch (Exception e) { e.printStackTrace(); } }
/**
 * Runs all queries previously queued with AddQuery() as one batched request
 * against searchd, then clears the pending-query list.
 *
 * <p>Every stream read below follows the searchd wire format and is
 * order-critical; do not reorder reads.
 *
 * @return one SphinxResult per queued query, or null on failure (the reason
 *         is stored in _error); per-query errors/warnings are recorded on the
 *         individual SphinxResult entries
 * @throws SphinxException propagated from the underlying request machinery
 */
public SphinxResult[] RunQueries() throws SphinxException {
    if (_reqs == null || _reqs.size() < 1) {
        _error = "no queries defined, issue AddQuery() first";
        return null;
    }

    /* build the mega-request: header int (0 = it's a client), query count,
       then each pre-serialized query blob back to back */
    int nreqs = _reqs.size();
    ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
    try {
        DataOutputStream req = new DataOutputStream(reqBuf);
        req.writeInt(0); /* it's a client */
        req.writeInt(nreqs);
        for (int i = 0; i < nreqs; i++)
            req.write((byte[]) _reqs.get(i));
        req.flush();
    } catch (Exception e) {
        _error = "internal error: failed to build request: " + e;
        return null;
    }

    DataInputStream in = _DoRequest(SEARCHD_COMMAND_SEARCH, VER_COMMAND_SEARCH, reqBuf);
    if (in == null)
        return null;

    SphinxResult[] results = new SphinxResult[nreqs];
    _reqs = new ArrayList();
    try {
        for (int ires = 0; ires < nreqs; ires++) {
            SphinxResult res = new SphinxResult();
            results[ires] = res;

            /* per-query status: warnings still carry a full result payload,
               errors end this query's data, so skip to the next one */
            int status = in.readInt();
            res.setStatus(status);
            if (status != SEARCHD_OK) {
                String message = readNetUTF8(in);
                if (status == SEARCHD_WARNING) {
                    res.warning = message;
                } else {
                    res.error = message;
                    continue;
                }
            }

            /* read fields */
            int nfields = in.readInt();
            res.fields = new String[nfields];
            for (int i = 0; i < nfields; i++)
                res.fields[i] = readNetUTF8(in);

            /* read attributes: (name, type) pairs */
            int nattrs = in.readInt();
            res.attrTypes = new int[nattrs];
            res.attrNames = new String[nattrs];
            for (int i = 0; i < nattrs; i++) {
                res.attrNames[i] = readNetUTF8(in);
                res.attrTypes[i] = in.readInt();
            }

            /* read match count, then the id-width flag (0 => 32-bit dword ids,
               otherwise 64-bit ids) */
            int count = in.readInt();
            int id64 = in.readInt();
            res.matches = new SphinxMatch[count];
            for (int matchesNo = 0; matchesNo < count; matchesNo++) {
                /* document id (width per id64 flag) followed by weight */
                SphinxMatch docInfo =
                        new SphinxMatch((id64 == 0) ? readDword(in) : in.readLong(), in.readInt());

                /* read this match's attribute values, one per declared attr */
                for (int attrNumber = 0; attrNumber < res.attrTypes.length; attrNumber++) {
                    int type = res.attrTypes[attrNumber];

                    /* handle bigints */
                    if (type == SPH_ATTR_BIGINT) {
                        docInfo.attrValues.add(attrNumber, Long.valueOf(in.readLong()));
                        continue;
                    }

                    /* handle floats */
                    if (type == SPH_ATTR_FLOAT) {
                        docInfo.attrValues.add(attrNumber, Float.valueOf(in.readFloat()));
                        continue;
                    }

                    /* handle strings */
                    if (type == SPH_ATTR_STRING) {
                        String s = readNetUTF8(in);
                        docInfo.attrValues.add(attrNumber, s);
                        continue;
                    }

                    /* handle everything else as unsigned ints */
                    long val = readDword(in);
                    if (type == SPH_ATTR_MULTI) {
                        long[] vals = new long[(int) val];
                        for (int k = 0; k < val; k++)
                            vals[k] = readDword(in);
                        docInfo.attrValues.add(attrNumber, vals);
                    } else if (type == SPH_ATTR_MULTI64) {
                        /* count on the wire is in dwords; each value is two */
                        val = val / 2;
                        long[] vals = new long[(int) val];
                        for (int k = 0; k < val; k++)
                            vals[k] = in.readLong();
                        docInfo.attrValues.add(attrNumber, vals);
                    } else {
                        docInfo.attrValues.add(attrNumber, Long.valueOf(val));
                    }
                }
                res.matches[matchesNo] = docInfo;
            }

            /* trailing per-query stats: totals, time (ms on the wire), words */
            res.total = in.readInt();
            res.totalFound = in.readInt();
            res.time = in.readInt() / 1000.0f;
            res.words = new SphinxWordInfo[in.readInt()];
            for (int i = 0; i < res.words.length; i++)
                res.words[i] = new SphinxWordInfo(readNetUTF8(in), readDword(in), readDword(in));
        }
        return results;
    } catch (IOException e) {
        _error = "incomplete reply";
        return null;
    }
}