private void dumpDocuments() throws IOException {
    outputBanner("Documents");

    int totalDocs = mIndexReader.numDocs();
    outputLn();
    outputLn("There are " + totalDocs + " documents in this index.");

    mConsole.debug("Total number of documents: " + totalDocs);
    for (int i = 0; i < totalDocs; i++) {
        Document doc = null;
        try {
            doc = mIndexReader.document(i, null);
        } catch (IllegalArgumentException e) {
            if ("attempt to access a deleted document".equals(e.getMessage())) {
                mConsole.warn("encountered exception while dumping document " + i + ": " + e.getMessage());
            } else {
                throw e;
            }
        }
        // doc remains null when the document has been deleted; dumpDocument() is expected to handle that case.
        dumpDocument(i, doc);

        if ((i + 1) % 100 == 0) {
            mConsole.debug("Dumped " + (i + 1) + " documents");
        }
    }
}
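// A minimal sketch (not part of the original tool) of how an IndexReader like the mIndexReader
// used above might be opened, assuming the Lucene 3.x API that dumpDocuments() relies on
// (IndexReader.document(int, FieldSelector)). The index path and helper name are illustrative.
// Requires org.apache.lucene.index.IndexReader, org.apache.lucene.store.FSDirectory, java.io.File.
private static IndexReader openReader(String indexPath) throws IOException {
    // Open the index directory read-only; IndexReader.open(Directory, boolean) is the Lucene 3.x call.
    return IndexReader.open(FSDirectory.open(new File(indexPath)), true);
}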
/** * For "iteration" number of cycles, generate user-visit events within the bounds of customer * range and user range. */ public void run() { final String meth = "run()"; ILdtOperations ldtOps = dbOps.getLdtOps(); long startTimeMs = System.currentTimeMillis(); long endTimeMS = startTimeMs + 1000 * emulationDays * 86400; long checkTimeMS = 0; long secondStartMS = 0; long deltaTimeMS = 0; int interTransactionWaitMS = 10; long secondCount = 1; long opNum = 0; try { // This thread will try to maintain a given TPS load, where it will // measure its performance per second and try to speed up or slow // down accordingly. // In each second, it will attempt to complete "threadTPS" actions // by calling the "working function" doOperation(). console.info( "<%s:%s> ThreadNum(%d) Start: ThreadTPS(%d)", CLASSNAME, meth, threadNumber, threadTPS); do { // We're going to organize ourselves in terms of TPS and // second intervals. For a given number of TPS, we're going // to try to fit in that amount of operations, and then sleep // for the remaining amount if we're ahead of schedule in each // second. If we fall behind, then we're going to shorten the // amount of time between each operation. secondStartMS = System.currentTimeMillis(); for (int i = 0; i < threadTPS; i++) { opNum = (secondCount * threadTPS) + i; doOperation((int) opNum, ldtOps); Thread.sleep(interTransactionWaitMS); } // for each Transaction (per second) secondCount++; checkTimeMS = System.currentTimeMillis(); deltaTimeMS = checkTimeMS - secondStartMS; console.debug( "StartSecond(%d) CheckTime(%d) Delta(%d)", secondStartMS, checkTimeMS, deltaTimeMS); if (deltaTimeMS > 1000) { if (interTransactionWaitMS > 10) { interTransactionWaitMS--; } // otherwise, do nothing. } else { interTransactionWaitMS++; Thread.sleep(Math.abs(1000 - deltaTimeMS)); } } while (checkTimeMS < endTimeMS); // end for each generateCount } catch (Exception e) { e.printStackTrace(); console.error("<%s:%s>Problem with Thread(%d) ", CLASSNAME, meth, opNum); } } // end run()
public static String getUniquePath() {
    // Insert a separator explicitly: java.io.tmpdir does not end with one on every platform,
    // and getCanonicalPath() below will normalize any doubled separator.
    String unique = System.getProperty("java.io.tmpdir") + File.separator + UUID.randomUUID().toString();
    File f;
    try {
        f = new File(unique);
        unique = f.getCanonicalPath();
    } catch (IOException ex) {
        Console.error("Strange file error.\n" + ex);
    }
    Console.debug("Generated unique path: " + unique, 3);
    return unique;
}
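// A minimal usage sketch, not part of the original class: create a scratch directory at the
// unique path before working under it. The helper name createScratchDir() is illustrative only.
public static File createScratchDir() {
    File workDir = new File(getUniquePath());
    if (!workDir.mkdirs()) {
        // mkdirs() returns false when the directory could not be created (or already exists).
        Console.error("Could not create temp directory: " + workDir);
    }
    return workDir;
}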
public void run() {
    try {
        StunAddress localAddr = null;
        StunAddress serverAddr = null;
        localAddr = new StunAddress(InetAddress.getLocalHost(), 5678);
        serverAddr = new StunAddress("stun01bak.sipphone.com.", 3479);
        NetworkConfigurationDiscoveryProcess addressDiscovery =
                new NetworkConfigurationDiscoveryProcess(localAddr, serverAddr);
        addressDiscovery.start();
        StunDiscoveryReport report = addressDiscovery.determineAddress();
        if (console.isDebugEnabled())
            console.debug("Result of NetworkDiagnostics:\n" + report);
    } catch (Throwable exc)
    // catch everything: this is a diagnostic kit only, so we don't want it
    // to spoil our actual application
    {
        console.error("The network diagnostics process has failed with "
                + "the following exception", exc);
    }
}
private void dumpTerms() throws IOException {
    outputBanner("Terms (in Term.compareTo() order)");

    TermEnum terms = mIndexReader.terms();
    int order = 0;
    while (terms.next()) {
        order++;
        Term term = terms.term();
        String field = term.field();
        String text = term.text();
        if (!wantThisTerm(field, text)) {
            continue;
        }

        outputLn(order + " " + field + ": " + text);

        /*
         * For each term, print the
         * <document, frequency, <position>*> tuples for the term.
         *
         * document:  the document in which the term appears
         * frequency: the number of times the term appears in the document
         * position:  the position of each appearance in the document
         *
         * e.g. given
         *   doc.add(new Field("field", "one two three two four five",
         *           Field.Store.YES, Field.Index.ANALYZED));
         * the tuple for Term("field", "two") in this document would look like:
         *   88, 2, <2, 4>
         * where
         *   88     is the document number
         *   2      is the frequency with which this term appears in the document
         *   <2, 4> are the positions of each appearance in the document
         */
        // by TermPositions
        outputLn(" document, frequency, <position>*");

        // keep track of docs that appear in all terms that are filtered in.
        Set<Integer> docNums = null;
        if (hasFilters()) {
            docNums = new HashSet<Integer>();
        }

        TermPositions termPos = mIndexReader.termPositions(term);
        while (termPos.next()) {
            int docNum = termPos.doc();
            int freq = termPos.freq();

            if (docNums != null) {
                docNums.add(docNum);
            }

            output(" " + docNum + ", " + freq + ", <");
            boolean first = true;
            for (int f = 0; f < freq; f++) {
                int positionInDoc = termPos.nextPosition();
                if (!first) {
                    output(" ");
                } else {
                    first = false;
                }
                output(positionInDoc + "");
            }
            outputLn(">");
        }
        termPos.close();

        if (docNums != null) {
            computeDocsIntersection(docNums);
        }

        outputLn();

        if (order % 1000 == 0) {
            mConsole.debug("Dumped " + order + " terms");
        }
    }
    terms.close();
}
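// A self-contained sketch of building the one-document example index described in the comment
// inside dumpTerms() above, assuming the Lucene 3.x API (3.1+ for IndexWriterConfig) that the
// dumper already uses; RAMDirectory and the analyzer choice are illustrative, not taken from
// the original tool. Requires the org.apache.lucene.analysis.standard, document, index, store,
// and util imports.
private static Directory buildExampleIndex() throws IOException {
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir,
            new IndexWriterConfig(Version.LUCENE_36, new StandardAnalyzer(Version.LUCENE_36)));
    Document doc = new Document();
    // The exact field from the comment's example.
    doc.add(new Field("field", "one two three two four five", Field.Store.YES, Field.Index.ANALYZED));
    writer.addDocument(doc);
    writer.close();
    // Running dumpTerms() against this index should show Term("field", "two") with frequency 2
    // and its two positions in the single document.
    return dir;
}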
// Parses the command-line options; exits via Console.die() if an option is not valid.
public static void parseArgs(String[] args) {
    // https://www.karlin.mff.cuni.cz/network/prirucky/javatut/java/cmdLineArgs/parsing.html
    String arg;
    int argPos = 0;
    while ((argPos < args.length) && args[argPos].startsWith("-")) {
        arg = args[argPos++];
        if (arg.startsWith("-v") || arg.startsWith("--verbose")) {
            String level = arg.startsWith("-v") ? arg.replace("-v", "") : arg.replace("--verbose", "");
            VerboseLevel = level.isEmpty() ? 1 : Integer.parseInt(level);
            if (VerboseLevel > MaxVerboseLevel) {
                VerboseLevel = MaxVerboseLevel;
            } else if (VerboseLevel < 1) {
                VerboseLevel = 1;
            }
            Console.VerboseLevel = VerboseLevel;
            Console.debug("Option: Verbose level set to " + VerboseLevel);
        } else if (arg.equals("-s") || arg.equals("--skip-assembly")) {
            Console.debug("Option: Skipping assembly.");
            SkipAssembly = true;
        } else if (arg.equals("--skip-cleanup")) {
            Console.debug("Option: Skipping cleanup.");
            SkipCleanup = true;
        } else if (arg.equals("--decode-res")) {
            Console.debug("Option: Decoding resources.");
            DecodeResources = true;
        } else if (arg.equals("-d") || arg.equals("--detect-only")) {
            Console.debug("Option: Determining protection information only.");
            DetectOnly = true;
        } else if (arg.equals("-f") || arg.equals("--force")) {
            Console.debug("Option: Allowing file overwrites.");
            AllowOverwrites = true;
        } else if (arg.equals("--sign-only")) {
            Console.debug("Option: Signing only.");
            SignOnly = true;
            AllowOverwrites = true; // disable any checking, won't be needed
            SkipCleanup = true;
        } else if (arg.equals("--sign-key")) {
            arg = args[argPos++];
            SignKey = new File(arg);
            if (!SignKey.exists()) {
                Console.die("Signing key does not exist: " + arg);
            }
        } else if (arg.equals("--sign-cert")) {
            arg = args[argPos++];
            SignCert = new File(arg);
            if (!SignCert.exists()) {
                Console.die("Signing certificate does not exist: " + arg);
            }
        } else if (arg.equals("--sign-pass")) {
            SignPass = args[argPos++];
        } else if (arg.equals("--info-only")) {
            Console.debug("Option: Getting info only.");
            InfoOnly = true;
            AllowOverwrites = true; // disable any checking, won't be needed
        } else if (arg.equals("--assemble-only")) {
            Console.debug("Option: Assembling only.");
            AssembleOnly = true;
        } else if (arg.equals("--fplist")) {
            ListFPsOnly = true;
        } else if (arg.equals("--fpexclude")) {
            arg = args[argPos++];
            String[] fps = arg.split(",");
            for (String fp : fps) {
                ExcludedFPs.add(fp.trim());
            }
            Console.debug("Option: Excluding fingerprints: " + arg);
        } else if (arg.equals("--fpinclude")) {
            arg = args[argPos++];
            String[] fps = arg.split(",");
            for (String fp : fps) {
                IncludedFPs.add(fp.trim());
            }
            Console.debug("Option: Including fingerprints: " + arg);
        } else if (arg.equals("--chksigs")) {
            arg = args[argPos++];
            CheckSigsBehavior = Integer.parseInt(arg);
        } else if (arg.equals("--getpi")) {
            arg = args[argPos++];
            GetPIBehavior = Integer.parseInt(arg);
        } else if (arg.equals("--sigvfy")) {
            arg = args[argPos++];
            SigVerifyBehavior = Integer.parseInt(arg);
        } else if (arg.equals("--spoof-id")) {
            arg = args[argPos++];
            DeviceIDSpoofType = Integer.parseInt(arg);
            // These are enabled=false by default
            IncludedFPs.add("Hook Get Secure Setting");
            IncludedFPs.add("Hook Device ID");
            if (DeviceIDSpoofType == 5) {
                DeviceIDSpoof = args[argPos++];
                // device id must be 15 characters, numeric only
                if (!DeviceIDSpoof.matches("\\d{15}")) {
                    Console.die("Spoofed device ID must be 15 digits.", -1);
                }
            }
        } else if (arg.equals("--spoof-account")) {
            arg = args[argPos++];
            AccountNameSpoofType = Integer.parseInt(arg);
            // These are enabled=false by default
            IncludedFPs.add("Hook Get Account Name");
            if (AccountNameSpoofType == 3) {
                AccountNameSpoof = args[argPos++].toUpperCase();
                if (!AccountNameSpoof.matches("[a-zA-Z0-9\\.]+")) {
                    Console.die("Spoofed Account Name must be alpha-numeric!", -1);
                }
            }
        } else if (arg.equals("--spoof-wifimac")) {
            arg = args[argPos++];
            WifiMacSpoofType = Integer.parseInt(arg);
            // These are enabled=false by default
            IncludedFPs.add("Hook Get Wifi Mac");
            if (WifiMacSpoofType == 3) {
                WifiMacSpoof = args[argPos++].toUpperCase();
                if (!WifiMacSpoof.matches("(?m)^([0-9A-F]{2}([:-]|$)){6}$")) {
                    Console.die("Spoofed Wifi MAC must be of the form 11:22:33:AA:BB:CC !", -1);
                }
            }
        } else if (arg.equals("--spoof-btmac")) {
            arg = args[argPos++];
            BTMacSpoofType = Integer.parseInt(arg);
            // These are enabled=false by default
            IncludedFPs.add("Hook Bluetooth MAC");
            if (BTMacSpoofType == 3) {
                BTMacSpoof = args[argPos++].toUpperCase();
                if (!BTMacSpoof.matches("(?m)^([0-9A-F]{2}([:-]|$)){6}$")) {
                    Console.die("Spoofed BT MAC must be of the form 11:22:33:AA:BB:CC !", -1);
                }
            }
        } else if (arg.equals("--spoof-model")) {
            arg = args[argPos++];
            // This is enabled=false by default
            IncludedFPs.add("Hook Device Model");
            Options.DeviceModelSpoof = arg;
            Console.debug("Spoofing model as " + Options.DeviceModelSpoof);
        } else if (arg.equals("--spoof-network")) {
            arg = args[argPos++];
            // This is enabled=false by default
            IncludedFPs.add("Hook Network Operator");
            Options.NetworkOperatorSpoof = arg;
            Console.debug("Spoofing network operator as " + Options.NetworkOperatorSpoof);
        } else if (arg.equals("--spoof-manufacturer")) {
            arg = args[argPos++];
            // This is enabled=false by default
            IncludedFPs.add("Hook Device Manufacturer");
            Options.DeviceManufacturerSpoof = arg;
            Console.debug("Spoofing manufacturer as " + Options.DeviceManufacturerSpoof);
        } else if (arg.equals("--key-apk")) {
            KeyApkPath = args[argPos++];
            File f = new File(KeyApkPath);
            if (!f.exists()) {
                Console.die("The key apk " + KeyApkPath + " does not exist!");
            }
        } else if (arg.equals("--trace")) {
            Console.debug("Option: Enabling method trace and debug hooks.");
            IncludedFPs.add("Method Trace");
            IncludedFPs.add("Method Trace FixLocals");
            DebugHooks = true;
        } else if (arg.equals("--translate")) {
            Console.debug("Option: Enabling Smali string language translation.");
            SmaliHinter.enableTranslations();
        } else if (arg.equals("-h") || arg.equals("--help")) {
            showUsage();
            System.exit(0);
        } else if (arg.equals("--dbghooks")) {
            Console.debug("Option: Using debugging hooks.");
            DebugHooks = true;
        } else if (arg.equals("--skip-hints")) {
            Console.debug("Option: Skipping smali hint generation.");
            SkipHints = true;
        } else {
            Console.die("Unknown option: " + arg + ".", -1);
        }
    }

    // No point processing the rest of the logic; we just want the FP listing.
    if (ListFPsOnly) {
        return;
    }

    // If either is alone, no bueno!
    if ((SignKey != null) ^ (SignCert != null)) {
        Console.die("Options --sign-key and --sign-cert must be used together.");
    }

    if (argPos == args.length) {
        Console.error("Oopsy! Dump path / Apk file missing. Please review:");
        showUsage();
        System.exit(-1);
    }

    // Make sure smali dump / apk exists
    File input = ensureExists(args[argPos]);
    if (input.isDirectory()) {
        if (SignOnly) {
            Console.die("Sign only option enabled but input is not an APK file.");
        }
        SmaliDir = input.getPath();
        if (!SkipAssembly) {
            if ((argPos + 1) >= args.length) {
                Console.die("Output APK required.");
            }
            OutputApk = args[argPos + 1];
            ensureExists(OutputApk);
        }
    } else {
        if (AssembleOnly) {
            Console.die("Option --assemble-only only works with smali dump directories.");
        }
        ApkPath = input.getPath();
        OutputApk = ApkPath.replace(".apk", "");
        if (args.length <= (argPos + 1)) {
            // overwrite original by default
            if (SignOnly || DetectOnly || SkipAssembly || InfoOnly) {
                OutputApk = ApkPath;
            } else {
                OutputApk += "_sequenced.apk";
            }
        } else {
            OutputApk = args[argPos + 1];
        }
    }

    Console.debug("Output apk: " + OutputApk);
    Console.debug("Apk path: " + ApkPath);

    if (!DetectOnly && !SkipAssembly) {
        enforceOverwrite(OutputApk, "output file");
    }
}
/**
 * Do the main operation in emulate mode:
 * (1) Write a new URL Site Visit Record to the base DB.
 * (2) Check to see if the corresponding User Record is in the Segmented Cache.
 *     -- If not, write a new User Record with the CACHE TTL.
 *     -- UPDATE THE USER RECORD WITH A NEW (FULL) LDT load (scan the existing LDT
 *        in the base DB and write to the cache LDT).
 * (3) Probe the base DB to see if the LDT data is consistent.
 *
 * @param opNum  the sequence number of this operation
 * @param ldtOps the LDT operations implementation to use
 */
private void doOperation(int opNum, ILdtOperations ldtOps) {
    final String meth = "doOperation()";
    boolean recordPresent = false;
    String baseSet;
    String cacheSet;

    try {
        int customerSeed = random.nextInt(this.customerMax);
        CustomerRecord custRec = new CustomerRecord(console, customerSeed);

        long userSeed = getUserRecordSeed(this.userMax);
        UserRecord userRec = new UserRecord(console, dbOps, custRec.getCustomerID(), (int) userSeed);

        SiteVisitEntry sve = new SiteVisitEntry(console, custRec.getCustomerID(),
                userRec.getUserID(), opNum, LDT_BIN, this.timeToLive);

        baseSet = userRec.getCustomerBaseSet();
        cacheSet = userRec.getCustomerCacheSet();

        // Write the Site Visit to Storage -- which is hidden behind
        // this interface because there can be multiple implementations
        // of the LDT.
        sve.toStorage(client, baseNamespace, baseSet, ldtOps);

        // Check to see if the UserRecord is in the Segment Cache. If it is,
        // then add to the Cache LDT. If it is not, then create a new
        // User Record in the Segment, and populate the LDT Info (the Site
        // Visit Data) with the LDT data from the DB User Record.
        recordPresent = userRec.updateCache(client, cacheNamespace);
        if (recordPresent) {
            sve.toStorage(client, cacheNamespace, cacheSet, ldtOps);
        } else {
            sve.reloadCache(client, baseNamespace, cacheNamespace, ldtOps);
        }

        String keyStr = userRec.getUserID();

        // At predetermined milestones, perform various actions.
        // Show simple status at regular intervals. For the regular large
        // scale tests with 100 threads, we won't hit this very often.
        if ((opNum + threadNumber) % 1000 == 0) {
            console.debug(
                "<%s:%s> Thread(%d) Cust#(%d) BaseSet(%s) User#(%d) UserID(%s) Iteration(%d)",
                CLASSNAME, meth, threadNumber, customerSeed, baseSet, userSeed, keyStr, opNum);
        }

        // Do a heavy duty scan less frequently.
        if ((opNum + threadNumber) % 2000 == 0) {
            Key baseKey = new Key(baseNamespace, baseSet, keyStr);
            Key cacheKey = new Key(cacheNamespace, cacheSet, keyStr);
            console.debug(
                "<%s:%s> <<SCAN TEST>> Thread(%d) Cust#(%d) BaseSet(%s) CacheSet(%s) User#(%d) UserID(%s) Iteration(%d)",
                CLASSNAME, meth, threadNumber, customerSeed, baseSet, cacheSet, userSeed, keyStr, opNum);

            List<Map<String, Object>> baseResult = null;
            List<Map<String, Object>> cacheResult = null;
            int baseResultSize = 0;
            int baseCheckSize = 0;
            int cacheResultSize = 0;
            int cacheCheckSize = 0;
            try {
                baseResult = ldtOps.scanLDT(baseKey);
                cacheResult = ldtOps.scanLDT(cacheKey);
                cacheCheckSize = ldtOps.ldtSize(cacheKey, LDT_BIN);
                if (baseResult != null) {
                    baseResultSize = baseResult.size();
                    baseCheckSize = ldtOps.ldtSize(baseKey, LDT_BIN);
                    if (baseResultSize != baseCheckSize) {
                        console.error(
                            "<%s:%s> <<BASE SCAN Size Error>> Thread(%d) Cust#(%d) BaseSet(%s) UserID(%s) ScanSide(%d) LDT Size(%d)",
                            CLASSNAME, meth, threadNumber, customerSeed, baseSet, keyStr,
                            baseResultSize, baseCheckSize);
                    }
                }
                if (cacheResult != null) {
                    cacheResultSize = cacheResult.size();
                    cacheCheckSize = ldtOps.ldtSize(cacheKey, LDT_BIN);
                    if (cacheResultSize != cacheCheckSize) {
                        console.error(
                            "<%s:%s> <<CACHE SCAN Size Error>> Thread(%d) Cust#(%d) CacheSet(%s) UserID(%s) ScanSide(%d) LDT Size(%d)",
                            CLASSNAME, meth, threadNumber, customerSeed, cacheSet, keyStr,
                            cacheResultSize, cacheCheckSize);
                    }
                }
            } catch (AerospikeException ae) {
                console.error("Aerospike Error Code(%d) Error Message(%s)",
                        ae.getResultCode(), ae.getMessage());
                console.info("Keep on Truckin");
            }
            console.debug(
                "<%s:%s> <<SCAN RESULT>> Thread(%d) Cust#(%d) BaseSet(%s) CacheSet(%s) UserID(%s) BaseLDT(%d) CacheLDT(%d)",
                CLASSNAME, meth, threadNumber, customerSeed, baseSet, cacheSet, keyStr,
                baseResultSize, cacheResultSize);
        }
    } catch (AerospikeException ae) {
        console.error("Aerospike Error Code(%d) Error Message(%s)",
                ae.getResultCode(), ae.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        console.error("[%s] Problem with Thread(%d) Customer Record: Seed(%d)",
                "Emulate: doOperation(): ", threadNumber, opNum);
    }
}