private void _write(String service, String queryString, InputStream in) throws SQLException {
    // Cache key: md5 of the combined service + query string
    String md5 = StringUtils.md5Hash(createHashRoot(service, queryString));
    // String md5 = StringUtils.md5Hash(queryString);

    Timestamp timestamp = new Timestamp(new GregorianCalendar().getTimeInMillis());
    // Reader reader = new InputStreamReader(in);

    ResultSet rs = null;
    try {
        rs = executeQuery(Query.LOOKUP, md5);
        if (rs != null && rs.next()) {
            // Entry already cached: overwrite its data and timestamp
            execute(Query.UPDATE, null, in, timestamp, md5);
        } else {
            // No entry yet: insert a new row under this key
            execute(Query.INSERT, null, md5, queryString, in, timestamp);
        }
    } finally {
        if (rs != null) {
            rs.close();
        }
    }

    // return lookup(queryString);
}
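/*
 * Hedged sketch (not part of the original class): illustrates the lookup-then-update-or-insert
 * pattern that _write above delegates to execute()/executeQuery(). The table name "query_cache"
 * and its columns (id, query_string, data, time) are assumptions for illustration only; the
 * project's Query.LOOKUP/UPDATE/INSERT statements may use a different schema.
 * Assumes java.sql.Connection and java.sql.PreparedStatement are imported.
 */
private static void upsertExample(Connection conn, String md5, String queryString,
        InputStream data, Timestamp timestamp) throws SQLException {
    try (PreparedStatement lookup = conn.prepareStatement(
            "SELECT id FROM query_cache WHERE id = ?")) {
        lookup.setString(1, md5);
        try (ResultSet rs = lookup.executeQuery()) {
            // Choose UPDATE when the key already exists, otherwise INSERT
            String sql = rs.next()
                    ? "UPDATE query_cache SET data = ?, time = ? WHERE id = ?"
                    : "INSERT INTO query_cache (data, time, id, query_string) VALUES (?, ?, ?, ?)";
            try (PreparedStatement write = conn.prepareStatement(sql)) {
                write.setBinaryStream(1, data);   // cached response payload
                write.setTimestamp(2, timestamp); // time of caching
                write.setString(3, md5);          // hash of service + query string
                if (sql.startsWith("INSERT")) {
                    write.setString(4, queryString);
                }
                write.executeUpdate();
            }
        }
    }
}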
public static void main(String[] args) throws Exception {
    PropertyConfigurator.configure("log4j.properties");

    File dir = new File("/home/raven/Desktop/lgd/QueryLogs/all/");
    LogDirectory logDir = new LogDirectory(dir, Pattern.compile("access.*"));

    Date low = new GregorianCalendar(2011, 3, 10, 0, 0, 0).getTime();
    Date high = new GregorianCalendar(2011, 3, 17, 12, 0, 0).getTime();
    // low = new GregorianCalendar(2011, 3, 17, 0, 0, 0).getTime();
    // high = new GregorianCalendar(2011, 3, 19, 12, 0, 0).getTime();

    // No date restriction: iterate over the complete log range
    low = null;
    high = null;

    File outFile = new File("/home/raven/Desktop/LGDSparql.txt");
    PrintWriter writer = new PrintWriter(outFile);

    RangeApacheLogEntryIterator it = logDir.getIterator(low, high, true, true);
    DateFormat dateFormat = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss Z");

    int i = 0;
    while (it.hasNext()) {
        ApacheLogEntry entry = it.next();
        String uri = entry.getRequest().getUrl();

        // *
        // Skip entries whose URL cannot be UTF-8 decoded
        try {
            StringUtils.decodeUtf8(uri);
        } catch (Exception e) {
            e.printStackTrace();
            continue;
        }
        // */

        // Only keep LinkedGeoData SPARQL query requests
        if (!(uri.contains("sparql") && uri.contains("query=") && uri.contains("linkedgeodata"))) {
            continue;
        }

        writer.println(dateFormat.format(entry.getDate()) + "\t" + entry.getHostname() + "\t" + uri);

        // ++i;
        // System.out.println(i + " --- " + entry.getDate() + " --- ");
    }

    // Flush and release the output file
    writer.close();

    // processFile(new File("/home/raven/Desktop/lgd/QueryLogs/access.log"));
}
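/*
 * Hedged sketch (illustration only, not taken from the original code): shows how the
 * "query=" parameter could be pulled out of a logged request URL and URL-decoded once
 * the filter in main() has accepted the entry. Uses only the standard java.net.URLDecoder;
 * the helper name extractSparqlQuery is hypothetical.
 */
static String extractSparqlQuery(String uri) throws java.io.UnsupportedEncodingException {
    int start = uri.indexOf("query=");
    if (start < 0) {
        return null; // not a SPARQL query request
    }
    start += "query=".length();
    int end = uri.indexOf('&', start);
    String raw = (end < 0) ? uri.substring(start) : uri.substring(start, end);
    return java.net.URLDecoder.decode(raw, "UTF-8");
}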
public CacheEntry _lookup(String service, String queryString) throws SQLException {
    // Cache key: md5 of the combined service + query string
    String md5 = StringUtils.md5Hash(createHashRoot(service, queryString));
    // String md5 = StringUtils.md5Hash(queryString);

    ResultSet rs = executeQuery(Query.LOOKUP, md5);
    try {
        if (rs.next()) {
            Timestamp timestamp = rs.getTimestamp("time");
            Blob data = rs.getBlob("data");

            if (validateHash) {
                // Guard against md5 collisions: the stored query string must match exactly
                String cachedQueryString = rs.getString("query_string");
                if (!cachedQueryString.equals(queryString)) {
                    logger.error("HASH-CLASH:\n"
                            + "Service: " + service
                            + "\nNew QueryString: " + queryString
                            + "\nOld QueryString: " + cachedQueryString);
                    return null;
                }
            }

            // return new CacheEntryBase(timestamp.getTime(), lifespan,
            //         new InputStreamProviderResultSetBlob(rs, data));
            if (true) {
                throw new RuntimeException("Deprecated; do not use");
            }
        }

        if (rs.next()) {
            logger.warn("Multiple cache hits found, just one expected.");
        }
    } finally {
        // Note: we must not close rs here - the InputStreamProvider of the CacheEntry
        // must be closed instead.
        // SqlUtils.close(rs);
    }

    return null;
}
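/*
 * Hedged sketch: a plain-JDK approximation of the cache key used by _lookup/_write above.
 * The exact behaviour of StringUtils.md5Hash and createHashRoot is assumed here
 * (simple concatenation plus an MD5 hex digest); the original implementations may differ.
 */
static String md5KeyExample(String service, String queryString)
        throws java.security.NoSuchAlgorithmException {
    String hashRoot = service + queryString; // assumed composition of the hash root
    byte[] digest = java.security.MessageDigest.getInstance("MD5")
            .digest(hashRoot.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    StringBuilder hex = new StringBuilder();
    for (byte b : digest) {
        hex.append(String.format("%02x", b)); // lower-case hex, zero-padded per byte
    }
    return hex.toString();
}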
public static void processFile(File file) throws IOException, ParseException {
    BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
    try {
        String line;
        while ((line = reader.readLine()) != null) {
            ApacheLogEntry entry = ApacheLogEntry.parse(line);
            String uri = entry.getRequest().getUrl();

            // Only keep LinkedGeoData SPARQL query requests
            if (!(uri.contains("sparql") && uri.contains("query=") && uri.contains("linkedgeodata"))) {
                continue;
            }

            System.out.println(StringUtils.decodeUtf8(uri));
            // String uri = parts[]
        }
    } finally {
        reader.close();
    }
}