/**
 * BWT round-trip test harness. For each file named on the command line:
 * read the file fully, compute the Burrows-Wheeler transform via
 * {@code sais.bwtransform}, invert it with {@code unbwt}, and verify the
 * inverse matches the original bytes. Progress and timing go to stdout;
 * mismatches and the final status go to stderr.
 *
 * @param args file names to test, processed in order
 */
public static void main(String[] args) {
  for (int i = 0; i < args.length; ++i) {
    System.out.print(args[i] + ": ");
    try {
      File f = new File(args[i]);
      int n = (int) f.length();
      System.out.print(n + " bytes ... ");
      /* Allocate 5n bytes of memory. */
      byte[] T = new byte[n];
      byte[] U = new byte[n];
      byte[] V = new byte[n];
      int[] A = new int[n];
      /* Read n bytes of data. try-with-resources guarantees the stream is
         closed even on failure (the old code leaked it on exceptions). */
      try (FileInputStream s = new FileInputStream(f)) {
        // read() may return fewer bytes than requested; loop until the
        // whole file is in T (the old single read() call was a bug).
        int off = 0;
        while (off < n) {
          int r = s.read(T, off, n - off);
          if (r < 0) {
            throw new IOException("unexpected end of file: " + args[i]);
          }
          off += r;
        }
      }
      /* Construct the BWT, timing just the transform itself. */
      long start = System.currentTimeMillis();
      int pidx = new sais().bwtransform(T, U, A, n);
      long finish = System.currentTimeMillis();
      System.out.println(((finish - start) / 1000.0) + " sec");
      /* Invert the BWT and compare against the original data. */
      System.out.print("unbwtcheck ... ");
      unbwt(U, V, A, n, pidx);
      for (int j = 0; j < n; ++j) {
        if (T[j] != V[j]) {
          System.err.println("error " + j + ": " + T[j] + ", " + V[j]);
          return;
        }
      }
      // NOTE(review): success message goes to stderr, matching the
      // original behavior — consider moving to stdout for consistency.
      System.err.println("Done.");
    } catch (IOException e) {
      e.printStackTrace();
    } catch (OutOfMemoryError e) {
      e.printStackTrace();
    }
  }
}
/**
 * Restores this dictionary's state from {@code in}, reading fields in the
 * exact order the matching writer emitted them: word classes, word-class
 * map, skip header, attribute values, a series of bit-width/count ints,
 * the sentinel entries, and finally the hash-code/entry table.
 *
 * @param in stream positioned at the start of serialized dictionary data
 * @throws Exception on deserialization failure, or wrapping an
 *     {@link OutOfMemoryError} raised while reading entries (the error is
 *     kept as the cause; the old code dropped it)
 */
void readFrom(ObjectInputStream in) throws Exception {
  wordClasses = (WordClass[]) in.readObject();
  wordClassMap = (Map) in.readObject();
  skipHeader = (int[]) in.readObject();
  values = (AttributeValue[]) in.readObject();
  interpretationCountBits = in.readInt();
  interpretationLengthBits = in.readInt();
  valueCountBits = in.readInt();
  valuePointerBits = in.readInt();
  wordClassBits = in.readInt();
  wordClassCount = in.readInt();
  maxFreeValueCount = in.readInt();
  int entryCount = in.readInt();
  entries = new DictionaryEntry[entryCount];
  hashCodes = new int[entryCount];
  // i lives outside the try so the loop index survives into the catch
  // should the per-entry read fail partway through.
  int i = 0;
  try {
    // Sentinel entries precede the main entry table.
    UNKNOWN = DictionaryEntry.readDictionaryEntryFrom(in);
    UNKNOWN_WORD_UPPER = DictionaryEntry.readDictionaryEntryFrom(in);
    UNKNOWN_WORD_LOWER = DictionaryEntry.readDictionaryEntryFrom(in);
    CHAR = DictionaryEntry.readDictionaryEntryFrom(in);
    NUMBER = DictionaryEntry.readDictionaryEntryFrom(in);
    for (i = 0; i < entryCount; i++) {
      hashCodes[i] = in.readInt();
      entries[i] = DictionaryEntry.readDictionaryEntryFrom(in);
    }
  } catch (OutOfMemoryError oem) {
    oem.printStackTrace();
    // Preserve the original error as the cause instead of discarding it.
    throw new Exception(oem.getMessage(), oem);
  }
  interpreterPool = new Stack();
  tokenizedInputExtensionPool = new Stack();
  checkBuild();
}
public void run() {
  // Main loop of the job-delete startup thread: poll the job manager for
  // jobs ready for deletion, mark each as delete-started, and reset any
  // that failed to start. Runs until interrupted or a fatal error occurs.
  resetManager.registerMe();
  try {
    // Create a thread context object.
    IThreadContext threadContext = ThreadContextFactory.make();
    IJobManager jobManager = JobManagerFactory.make(threadContext);
    IRepositoryConnectionManager connectionMgr =
        RepositoryConnectionManagerFactory.make(threadContext);
    IDBInterface database =
        DBInterfaceFactory.make(
            threadContext,
            ManifoldCF.getMasterDatabaseName(),
            ManifoldCF.getMasterDatabaseUsername(),
            ManifoldCF.getMasterDatabasePassword());
    // Loop forever; each iteration is wrapped in its own try/catch so one
    // failure does not kill the thread.
    while (true) {
      try {
        // Before we begin, conditionally reset (blocks while a reset is in
        // progress).
        resetManager.waitForReset(threadContext);
        // Accumulate the wait before doing the next check.
        // We start with 10 seconds, which is the maximum. If there's a
        // service request that's faster than that, we'll adjust the time
        // downward.
        long waitTime = 10000L;
        if (Logging.threads.isDebugEnabled())
          Logging.threads.debug("Checking for deleting jobs");
        // See if there are any starting jobs.
        // Note: Since this following call changes the job state, we must be
        // careful to reset it on any kind of failure.
        JobDeleteRecord[] deleteJobs = jobManager.getJobsReadyForDelete(processID);
        try {
          if (deleteJobs.length == 0) {
            ManifoldCF.sleep(waitTime);
            continue;
          }
          if (Logging.threads.isDebugEnabled())
            Logging.threads.debug(
                "Found " + Integer.toString(deleteJobs.length) + " jobs ready to be deleted");
          long currentTime = System.currentTimeMillis();
          // Loop through jobs, attempting to move each into the
          // delete-started state.
          int i = 0;
          while (i < deleteJobs.length) {
            JobDeleteRecord jsr = deleteJobs[i++];
            Long jobID = jsr.getJobID();
            try {
              jobManager.prepareDeleteScan(jobID);
              // Start deleting this job!
              jobManager.noteJobDeleteStarted(jobID, currentTime);
              jsr.noteStarted();
            } catch (ManifoldCFException e) {
              // Interruption and DB-connection errors escape to the outer
              // handlers; anything else is logged and the job falls through
              // un-started, to be reset in the finally block below.
              if (e.getErrorCode() == ManifoldCFException.INTERRUPTED)
                throw new InterruptedException();
              if (e.getErrorCode() == ManifoldCFException.DATABASE_CONNECTION_ERROR)
                throw e;
              // We cannot abort the delete startup, but if we fall through,
              // we'll put the job back into the state whence it came. So,
              // fall through.
              Logging.threads.error("Exception tossed: " + e.getMessage(), e);
            }
          }
        } finally {
          // Clean up all jobs that did not start: put them back into the
          // state they came from. Only the last reset failure is rethrown.
          ManifoldCFException exception = null;
          int i = 0;
          while (i < deleteJobs.length) {
            JobDeleteRecord jsr = deleteJobs[i++];
            if (!jsr.wasStarted()) {
              // Clean up from failed start.
              try {
                jobManager.resetStartDeleteJob(jsr.getJobID());
              } catch (ManifoldCFException e) {
                exception = e;
              }
            }
          }
          if (exception != null)
            throw exception;
        }
        // Sleep for the retry interval.
        ManifoldCF.sleep(waitTime);
      } catch (ManifoldCFException e) {
        if (e.getErrorCode() == ManifoldCFException.INTERRUPTED)
          break;
        if (e.getErrorCode() == ManifoldCFException.DATABASE_CONNECTION_ERROR) {
          // Database reset: note the event so other threads coordinate,
          // pause, then restart the loop.
          resetManager.noteEvent();
          Logging.threads.error(
              "Start delete thread aborting and restarting due to database connection reset: "
                  + e.getMessage(),
              e);
          try {
            // Give the database a chance to catch up/wake up
            ManifoldCF.sleep(10000L);
          } catch (InterruptedException se) {
            break;
          }
          continue;
        }
        // Log it, but keep the thread alive
        Logging.threads.error("Exception tossed: " + e.getMessage(), e);
        if (e.getErrorCode() == ManifoldCFException.SETUP_ERROR) {
          // Shut the whole system down!
          System.exit(1);
        }
      } catch (InterruptedException e) {
        // We're supposed to quit
        break;
      } catch (OutOfMemoryError e) {
        System.err.println("agents process ran out of memory - shutting down");
        e.printStackTrace(System.err);
        System.exit(-200);
      } catch (Throwable e) {
        // A more severe error - but stay alive
        Logging.threads.fatal("Error tossed: " + e.getMessage(), e);
      }
    }
  } catch (Throwable e) {
    // Severe error on initialization
    System.err.println("agents process could not start - shutting down");
    Logging.threads.fatal("StartDeleteThread initialization error tossed: " + e.getMessage(), e);
    System.exit(-300);
  }
}