/** * Loads the siteinfo object from Scalaris. * * @return <tt>true</tt> on success, <tt>false</tt> if not found or no connection available */ @Override protected synchronized boolean loadSiteInfo() { TransactionSingleOp scalaris_single; try { Connection conn = cPool.getConnection(MAX_WAIT_FOR_CONNECTION); if (conn == null) { System.err.println( "Could not get a connection to Scalaris for siteinfo, waited " + MAX_WAIT_FOR_CONNECTION + "ms"); return false; } scalaris_single = new TransactionSingleOp(conn); try { siteinfo = scalaris_single.read("siteinfo").jsonValue(SiteInfo.class); // TODO: fix siteinfo's base url namespace = new MyNamespace(siteinfo); initialized = true; setLocalisedSpecialPageNames(); } catch (Exception e) { // no warning here - this probably is an empty wiki return false; } } catch (Exception e) { System.out.println(e); e.printStackTrace(); return false; } return true; }
/**
 * Sets up the connection to the Scalaris erlang node once on the server.
 *
 * <p>In case of errors, the <tt>error</tt> and <tt>notice</tt> attributes of the <tt>request</tt>
 * object are set appropriately if not <tt>null</tt>.
 *
 * @param request the request to the servlet (may be <tt>null</tt>)
 * @return a valid connection or <tt>null</tt> if an error occurred
 */
@Override
protected Connection getConnection(HttpServletRequest request) {
  try {
    Connection conn = cPool.getConnection(MAX_WAIT_FOR_CONNECTION);
    if (conn == null) {
      // pool timed out - report on stderr and (if possible) to the user
      System.err.println(
          "Could not get a connection to Scalaris, waited " + MAX_WAIT_FOR_CONNECTION + "ms");
      if (request != null) {
        setParam_error(request, "ERROR: DB unavailable");
        addToParam_notice(
            request,
            "error: <pre>Could not get a connection to Scalaris, waited "
                + MAX_WAIT_FOR_CONNECTION
                + "ms</pre>");
      }
      return null;
    }
    return conn;
  } catch (Exception e) {
    // without a request object there is no user to notify - log instead
    if (request != null) {
      setParam_error(request, "ERROR: DB unavailable");
      addToParam_notice(request, "error: <pre>" + e.getMessage() + "</pre>");
    } else {
      System.out.println(e);
      e.printStackTrace();
    }
    return null;
  }
}
/**
 * Starts importing the given dump file in a background thread.
 *
 * <p>Initializes the shared import state (<tt>currentImport</tt>, <tt>importLog</tt>,
 * <tt>importHandler</tt>) and spawns an {@link ImportThread} that performs the actual work.
 *
 * @param dumpsPath directory containing the dump files
 * @param req_import file name of the dump to import (relative to <tt>dumpsPath</tt>)
 * @param maxRevisions maximum number of revisions to import per page
 * @param maxTime do not import entries newer than this date (may be <tt>null</tt>)
 * @throws RuntimeException if setting up the import handler fails
 */
private void startImport(String dumpsPath, String req_import, int maxRevisions, Calendar maxTime)
    throws RuntimeException {
  currentImport = req_import;
  final String dumpFile = dumpsPath + File.separator + req_import;
  importLog = new CircularByteArrayOutputStream(1024 * 1024);
  final PrintStream msgOut = new PrintStream(importLog);
  msgOut.println("starting import...");
  // prepared SQLite databases (*.db) use a dedicated handler; anything else is
  // treated as an XML dump
  if (dumpFile.endsWith(".db")) {
    importHandler =
        new WikiDumpPreparedSQLiteToScalaris(
            dumpFile, Options.getInstance(), 1, 1, cPool.getConnectionFactory());
  } else {
    importHandler =
        new WikiDumpToScalarisHandler(
            de.zib.scalaris.examples.wikipedia.data.xml.Main.blacklist,
            null,
            maxRevisions,
            null,
            maxTime,
            cPool.getConnectionFactory());
  }
  importHandler.setMsgOut(msgOut);
  this.new ImportThread(importHandler, dumpFile, msgOut).start();
}
/**
 * Releases the connection back into the Scalaris connection pool.
 *
 * @param request the request to the servlet or <tt>null</tt> if there is none
 *     (unused here - the pool does not need request context to take a connection back)
 * @param conn the connection to release
 */
@Override
protected void releaseConnection(HttpServletRequest request, Connection conn) {
  cPool.releaseConnection(conn);
}
/** * Shows a page for importing a DB dump. * * @param request the request of the current operation * @param response the response of the current operation * @throws IOException * @throws ServletException */ @Override protected synchronized void showImportPage( HttpServletRequest request, HttpServletResponse response, Connection connection) throws ServletException, IOException { WikiPageBean page = new WikiPageBean(); page.setNotAvailable(true); request.setAttribute("pageBean", page); StringBuilder content = new StringBuilder(); String dumpsPath = getServletContext().getRealPath("/WEB-INF/dumps"); if (currentImport.isEmpty() && importHandler == null) { TreeSet<String> availableDumps = new TreeSet<String>(); File dumpsDir = new File(dumpsPath); if (dumpsDir.isDirectory()) { availableDumps.addAll( Arrays.asList( dumpsDir.list( new FilenameFilter() { @Override public boolean accept(File dir, String name) { return MATCH_WIKI_IMPORT_FILE.matcher(name).matches(); } }))); } // get parameters: String req_import = request.getParameter("import"); if (req_import == null || !availableDumps.contains(req_import)) { content.append("<h2>Please select a wiki dump to import</h2>\n"); content.append("<form method=\"get\" action=\"wiki\">\n"); content.append("<p>\n"); content.append(" <select name=\"import\" size=\"10\" style=\"width:500px;\">\n"); for (String dump : availableDumps) { content.append(" <option>" + dump + "</option>\n"); } content.append(" </select>\n"); content.append(" </p>\n"); content.append( " <p>Maximum number of revisions per page: <input name=\"max_revisions\" size=\"2\" value=\"2\" /></br><span style=\"font-size:80%\">(<tt>-1</tt> to import everything)</span></p>\n"); content.append( " <p>No entry newer than: <input name=\"max_time\" size=\"20\" value=\"\" /></br><span style=\"font-size:80%\">(ISO8601 format, e.g. 
<tt>2004-01-07T08:09:29Z</tt> - leave empty to import everything)</span></p>\n"); content.append(" <input type=\"submit\" value=\"Import\" />\n"); content.append("</form>\n"); content.append( "<p>Note: You will be re-directed to the main page when the import finishes.</p>"); } else { content.append("<h2>Importing \"" + req_import + "\"...</h2>\n"); try { currentImport = req_import; int maxRevisions = parseInt(request.getParameter("max_revisions"), 2); Calendar maxTime = parseDate(request.getParameter("max_time"), null); importLog = new CircularByteArrayOutputStream(1024 * 1024); PrintStream ps = new PrintStream(importLog); ps.println("starting import..."); String fileName = dumpsPath + File.separator + req_import; if (fileName.endsWith(".db")) { importHandler = new WikiDumpPreparedSQLiteToScalaris(fileName, cPool.getConnectionFactory()); } else { importHandler = new WikiDumpToScalarisHandler( de.zib.scalaris.examples.wikipedia.data.xml.Main.blacklist, null, maxRevisions, null, maxTime, cPool.getConnectionFactory()); } importHandler.setMsgOut(ps); this.new ImportThread(importHandler, fileName, ps).start(); response.setHeader("Refresh", "2; url = wiki?import=" + currentImport); content.append( "<p>Current log file (refreshed automatically every " + IMPORT_REDIRECT_EVERY + " seconds):</p>\n"); content.append("<pre>"); content.append("starting import...\n"); content.append("</pre>"); content.append("<p><a href=\"wiki?import=" + currentImport + "\">refresh</a></p>"); content.append( "<p><a href=\"wiki?stop_import=" + currentImport + "\">stop</a> (WARNING: pages may be incomplete due to missing templates)</p>"); } catch (Exception e) { setParam_error(request, "ERROR: import failed"); addToParam_notice(request, "error: <pre>" + e.getMessage() + "</pre>"); currentImport = ""; } } } else { content.append("<h2>Importing \"" + currentImport + "\"...</h2>\n"); String req_stop_import = request.getParameter("stop_import"); boolean stopImport; if (req_stop_import == null || 
req_stop_import.isEmpty()) { stopImport = false; response.setHeader( "Refresh", IMPORT_REDIRECT_EVERY + "; url = wiki?import=" + currentImport); content.append( "<p>Current log file (refreshed automatically every " + IMPORT_REDIRECT_EVERY + " seconds):</p>\n"); } else { stopImport = true; importHandler.stopParsing(); content.append("<p>Current log file:</p>\n"); } content.append("<pre>"); String log = importLog.toString(); int start = log.indexOf("\n"); if (start != -1) { content.append(log.substring(start)); } content.append("</pre>"); if (!stopImport) { content.append("<p><a href=\"wiki?import=" + currentImport + "\">refresh</a></p>"); content.append( "<p><a href=\"wiki?stop_import=" + currentImport + "\">stop</a> (WARNING: pages may be incomplete due to missing templates)</p>"); } else { content.append( "<p>Import has been stopped by the user. Return to <a href=\"wiki?title=" + MAIN_PAGE + "\">" + MAIN_PAGE + "</a>.</p>"); } } page.setNotice(WikiServlet.getParam_notice(request)); page.setError(getParam_error(request)); page.setTitle("Import Wiki dump"); page.setPage(content.toString()); RequestDispatcher dispatcher = request.getRequestDispatcher("page.jsp"); dispatcher.forward(request, response); }