/* -- This is a helper method to run the morph files */
private static void runMorphDataSet() throws Exception {
  String morph_directory = "../../thesis-datasets/morph/"; // directory where all the morph code is stored
  File d = new File(morph_directory);
  // get all the files from a directory
  File[] fList = d.listFiles();
  List<String> dir_list = new ArrayList<String>();
  for (File file : fList) {
    if (file.isDirectory()) {
      dir_list.add(file.getName());
    }
  }
  for (String dir : dir_list) {
    directory = morph_directory + dir + "/";
    System.out.println("Running TDDD " + directory);
    ReadFile.readFile(directory, fileList); // read the two files
    System.out.println(fileList.get(0) + " " + fileList.get(1));
    preliminaryStep(directory);
    startCDC();
    fileList.clear();
    fileArray.clear();
    hashed_File_List.clear();
  }
}
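For context, a minimal sketch of the shared static state that runMorphDataSet() appears to rely on. The field names come from the snippet; the element types (byte[] contents, Long hashes) and the wrapper class name are assumptions, not confirmed by the source.

import java.util.ArrayList;
import java.util.List;

// Hypothetical holder; in the real code these statics live in the same class as runMorphDataSet().
class CdcRunnerState {
  static String directory; // current working directory
  static List<String> fileList = new ArrayList<String>(); // file names filled by ReadFile.readFile
  static List<byte[]> fileArray = new ArrayList<byte[]>(); // raw file contents (assumed byte[])
  static List<List<Long>> hashed_File_List = new ArrayList<List<Long>>(); // per-file chunk hashes (assumed Long)
}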
private void safeBeginParagraph() {
  if (!myParagraphStarted) {
    myParagraphStarted = true;
    myBufferIsEmpty = true;
    beginParagraph(ZLTextParagraph.Kind.TEXT_PARAGRAPH);
    if (!myParagraphStored) {
      // final ArrayList models = Model.getBookTextModels();
      // myParagraphVector.add(new Pair(((ZLTextPlainModel)
      //     models.get(models.size() - 1) /*BookTextModel*/).getParagraphsNumber() - 1,
      //     models.size() - 1));
      myParagraphStored = true;
    }
    for (Iterator it = myDelayedControls.iterator(); it.hasNext(); ) {
      Pair pit = (Pair) it.next();
      addControl((Byte) pit.myFirst, (Boolean) pit.mySecond);
    }
    // if (myForcedEntry != null) {
    //   addControl(myForcedEntry);
    // } else {
    addControl(FBTextKind.REGULAR, true);
    // }
    for (Iterator it = myDelayedHyperlinks.iterator(); it.hasNext(); ) {
      addHyperlinkControl(FBTextKind.INTERNAL_HYPERLINK, (String) it.next());
    }
    myDelayedHyperlinks.clear();
  }
}
private void processTextParagraph(char[] data, int start, int end) {
  changeFont(FontType.FT_REGULAR);
  while (popKind()) {}
  myParagraphStarted = false;
  myBytesToSkip = 0;
  int textStart = start;
  boolean functionFlag = false;
  for (int ptr = start; ptr < end; ++ptr) {
    if (data[ptr] == 0) {
      functionFlag = true;
      if (ptr > textStart) {
        safeBeginParagraph();
        // myConvertedTextBuffer = ""; // .erase();
        myConvertedTextBuffer = "";
        // myConverter.convert(data, textStart, ptr);
        addData(myConvertedTextBuffer.toCharArray());
        myBufferIsEmpty = false;
      }
    } else if (functionFlag) {
      int paramCounter = (data[ptr]) % 8;
      if (end - ptr > paramCounter) {
        processTextFunction(data, ptr);
        ptr += paramCounter;
      } else {
        ptr = end - 1;
      }
      functionFlag = false;
      if (myBytesToSkip > 0) {
        ptr += myBytesToSkip;
        myBytesToSkip = 0;
      }
      textStart = ptr + 1;
    } else {
      if (data[ptr] == 0xA0) {
        data[ptr] = 0x20;
      }
      if (!myParagraphStarted && (textStart == ptr) && (data[ptr] == ' ')) {
        ++textStart;
      }
    }
  }
  if (end > textStart) {
    safeBeginParagraph();
    // myConvertedTextBuffer = ""; // erase();
    myConvertedTextBuffer = "";
    // myConverter.convert(data, textStart, end);
    addData(myConvertedTextBuffer.toCharArray());
    myBufferIsEmpty = false;
  }
  safeEndParagraph();
  // if (myForcedEntry != null) {
  //   myForcedEntry = null;
  // }
  myDelayedControls.clear();
}
public boolean readDocument() {
  try {
    myStream = new ZLInputStreamWithOffset(myFile.getInputStream());
    PdbHeader header = new PdbHeader(myStream);
    setMainTextModel();
    myFont = FontType.FT_REGULAR;
    for (int index = 0; index < header.Offsets.length; ++index) {
      int currentOffset = myStream.offset();
      int pit = header.Offsets[index];
      if (currentOffset > pit) {
        break;
      }
      // myStream.seek(pit - currentOffset, false);
      myStream.skip(pit - currentOffset);
      if (myStream.offset() != pit) {
        break;
      }
      int recordSize =
          ((index != header.Offsets.length - 1) ? header.Offsets[index + 1] : myFileSize) - pit;
      readRecord(recordSize);
    }
    myStream.close();
  } catch (IOException e) {
    return false;
  }
  for (Iterator it = myReferencedParagraphs.iterator(); it.hasNext(); ) {
    Pair pair = (Pair) it.next();
    int first = (Integer) pair.myFirst;
    int second = (Integer) pair.mySecond;
    ArrayList /*<Integer>*/ list = (ArrayList) myParagraphMap.get(first);
    if (list != null) {
      for (int k = second; k < list.size(); ++k) {
        if (((Integer) ((Pair) list.get(k)).myFirst) != -1) {
          // addHyperlinkLabel(fromNumber(first) + '#' + fromNumber(second), (Integer) list.get(k));
          final Pair p = (Pair) list.get(k);
          // addHyperlinkLabel(fromNumber(first) + '#' + fromNumber(second), (Integer) p.mySecond,
          //     (Integer) p.myFirst);
          break;
        }
      }
    }
  }
  myReferencedParagraphs.clear();
  myParagraphMap.clear();
  return true;
}
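The reader methods above reach into Pair through public myFirst/mySecond fields. A minimal sketch of that helper, assuming nothing beyond the two fields actually referenced:

// Hypothetical minimal Pair; the real class may carry generics or extra members.
class Pair {
  public Object myFirst;
  public Object mySecond;

  Pair(Object first, Object second) {
    myFirst = first;
    mySecond = second;
  }
}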
/** If a key is pressed, perform the respective action. */
public void keyPressed() {
  // Add 'stems' to the balls
  if (keyCode == SHIFT) {
    stems = !stems;
    for (int i = 0; i < balls.size(); i++) {
      Ball b = (Ball) balls.get(i);
      b.STEM = stems;
    }
  }
  // toggle repainting of the background
  else if (key == 'b') REPAINT = !REPAINT;
  // Empty the ArrayList of Balls
  else if (key == 'x') balls.clear();
  // Add a ball
  else if (key == 'f') addBall();
}
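A minimal sketch of the Ball class the handler above assumes. Only the STEM flag appears in the snippet; the position fields and constructor are placeholders.

// Hypothetical Ball for the Processing sketch above; only the STEM flag is referenced there.
class Ball {
  boolean STEM = false; // whether to draw a 'stem' on the ball
  float x, y; // placeholder position fields (assumption)

  Ball(float x, float y) {
    this.x = x;
    this.y = y;
  }
}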
String[] getPluginDirectories() {
  ArrayList<String> directories = new ArrayList<String>();
  PackageManager pm = this.mAppContext.getPackageManager();
  List<ResolveInfo> plugins =
      pm.queryIntentServices(
          new Intent(PLUGIN_ACTION),
          PackageManager.GET_SERVICES | PackageManager.GET_META_DATA);

  synchronized (mPackageInfoCache) {
    // clear the list of existing packageInfo objects
    mPackageInfoCache.clear();

    for (ResolveInfo info : plugins) {
      // retrieve the plugin's service information
      ServiceInfo serviceInfo = info.serviceInfo;
      if (serviceInfo == null) {
        Log.w(LOGTAG, "Ignoring bad plugin");
        continue;
      }
      Log.w(LOGTAG, "Loading plugin: " + serviceInfo.packageName);

      // retrieve information from the plugin's manifest
      PackageInfo pkgInfo;
      try {
        pkgInfo =
            pm.getPackageInfo(
                serviceInfo.packageName,
                PackageManager.GET_PERMISSIONS | PackageManager.GET_SIGNATURES);
      } catch (Exception e) {
        Log.w(LOGTAG, "Can't find plugin: " + serviceInfo.packageName);
        continue;
      }
      if (pkgInfo == null) {
        Log.w(
            LOGTAG,
            "Loading plugin: "
                + serviceInfo.packageName
                + ". Could not load package information.");
        continue;
      }

      /*
       * Find the location of the plugin's shared library. The default
       * is to assume the app is either a user-installed app or an
       * updated system app. In both of these cases the library is
       * stored in the app's data directory.
       */
      String directory = pkgInfo.applicationInfo.dataDir + "/lib";
      final int appFlags = pkgInfo.applicationInfo.flags;
      final int updatedSystemFlags =
          ApplicationInfo.FLAG_SYSTEM | ApplicationInfo.FLAG_UPDATED_SYSTEM_APP;
      // preloaded system app with no user updates
      if ((appFlags & updatedSystemFlags) == ApplicationInfo.FLAG_SYSTEM) {
        directory = PLUGIN_SYSTEM_LIB + pkgInfo.packageName;
      }

      // check if the plugin has the required permissions
      String[] permissions = pkgInfo.requestedPermissions;
      if (permissions == null) {
        Log.w(
            LOGTAG,
            "Loading plugin: "
                + serviceInfo.packageName
                + ". Does not have required permission.");
        continue;
      }
      boolean permissionOk = false;
      for (String permit : permissions) {
        if (PLUGIN_PERMISSION.equals(permit)) {
          permissionOk = true;
          break;
        }
      }
      if (!permissionOk) {
        Log.w(
            LOGTAG,
            "Loading plugin: "
                + serviceInfo.packageName
                + ". Does not have required permission (2).");
        continue;
      }

      // check to ensure the plugin is properly signed
      Signature[] signatures = pkgInfo.signatures;
      if (signatures == null) {
        Log.w(LOGTAG, "Loading plugin: " + serviceInfo.packageName + ". Not signed.");
        continue;
      }

      // determine the type of plugin from the manifest
      if (serviceInfo.metaData == null) {
        Log.e(LOGTAG, "The plugin '" + serviceInfo.name + "' has no type defined");
        continue;
      }
      String pluginType = serviceInfo.metaData.getString(PLUGIN_TYPE);
      if (!TYPE_NATIVE.equals(pluginType)) {
        Log.e(LOGTAG, "Unrecognized plugin type: " + pluginType);
        continue;
      }

      try {
        Class<?> cls = getPluginClass(serviceInfo.packageName, serviceInfo.name);
        // TODO implement any requirements of the plugin class here!
        boolean classFound = true;
        if (!classFound) {
          Log.e(
              LOGTAG,
              "The plugin's class '"
                  + serviceInfo.name
                  + "' does not extend the appropriate class.");
          continue;
        }
      } catch (NameNotFoundException e) {
        Log.e(LOGTAG, "Can't find plugin: " + serviceInfo.packageName);
        continue;
      } catch (ClassNotFoundException e) {
        Log.e(LOGTAG, "Can't find plugin's class: " + serviceInfo.name);
        continue;
      }

      // if all checks have passed then make the plugin available
      mPackageInfoCache.add(pkgInfo);
      directories.add(directory);
    }
  }

  return directories.toArray(new String[directories.size()]);
}
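A hedged usage sketch for the method above: a hypothetical caller (logPluginDirectories is not from the source) simply logs each discovered native plugin directory, assuming only android.util.Log.

// Hypothetical caller: report every directory a native plugin library may live in.
private void logPluginDirectories() {
  for (String pluginDir : getPluginDirectories()) {
    Log.i(LOGTAG, "Native plugin directory: " + pluginDir);
  }
}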
/*
 * - This method is used as a helper method to run the algo for the archive dataset
 * - Note the archive set has multiple directories (one for each url)
 * - So read all of the directories in first and for each directory run the code
 */
private static void runArchiveSet() throws Exception {
  System.out.println("Running TDDD archive");
  directory = "../../thesis-datasets/datasets2/";
  File file = new File(directory);
  String[] directory_list =
      file.list(
          new FilenameFilter() {
            @Override
            public boolean accept(File current, String name) {
              return new File(current, name).isDirectory(); // make sure it's a directory
            }
          });
  int totalRuns = 0; // used to avg the runs in the end
  // count how many times we will iterate through the data so we can make arrays of that size
  int total_iter_count = 0;
  for (int i = startBoundary; i <= endBoundary; i += increment) total_iter_count++;
  // System.out.println(Arrays.toString(directory_list));
  int sets = 0;
  // make the arrays to hold the respective info for the different versions
  // and run them simultaneously to speed up the program
  double[] block_size_list_last_year = new double[total_iter_count];
  double[] ratio_size_list_last_year = new double[total_iter_count];
  double[] block_size_list_six_month = new double[total_iter_count];
  double[] ratio_size_list_six_month = new double[total_iter_count];
  double[] block_size_list_two_year = new double[total_iter_count];
  double[] ratio_size_list_two_year = new double[total_iter_count];
  int current = 0;
  int six_month = 2;
  int last_year = 1;
  int two_year = 3;
  // loop through and run the cdc for each directory
  for (String dir : directory_list) {
    ReadFile.readFile(directory + dir, fileList); // read all the files in this directory
    preliminaryStep(directory + dir + "/"); // call the preliminaryStep on all the files
    totalRuns++;
    totalSize = fileArray.get(current).length; // length of the file we will be running it against

    // run it against six month
    startCDC(
        block_size_list_six_month,
        ratio_size_list_six_month,
        fileArray.get(current),
        fileArray.get(six_month),
        hashed_File_List.get(current),
        hashed_File_List.get(six_month));
    // run it against last year
    startCDC(
        block_size_list_last_year,
        ratio_size_list_last_year,
        fileArray.get(current),
        fileArray.get(last_year),
        hashed_File_List.get(current),
        hashed_File_List.get(last_year));
    // run it against two year
    startCDC(
        block_size_list_two_year,
        ratio_size_list_two_year,
        fileArray.get(current),
        fileArray.get(two_year),
        hashed_File_List.get(current),
        hashed_File_List.get(two_year));

    // clear the fileList and hashed_file_list arrays
    fileArray.clear();
    hashed_File_List.clear();
    fileList.clear();
    // if (Double.isNaN(ratio_size_list[0])) {
    //   System.out.println(sets + " " + Arrays.toString(ratio_size_list));
    //   test = true;
    //   break;
    // }
    if (sets % 200 == 0) System.out.println(sets);
    ++sets;
  } // end of directory list for loop

  // now output the averaged value for all the runs
  // System.out.println(Arrays.toString(ratio_size_list));
  System.out.println("Printing six_month");
  int index = 0;
  for (int i = startBoundary; i <= endBoundary; i += increment) {
    // avg out the outputs
    double blockSize = block_size_list_six_month[index] / (double) totalRuns;
    double ratio = ratio_size_list_six_month[index] / (double) totalRuns;
    // parenthesize the window arithmetic so it is not string-concatenated digit by digit
    System.out.println(i + " " + (i / 2 + 1) + " " + (i / 4 + 1) + " " + blockSize + " " + ratio);
    index++;
  }
  System.out.println("Printing last year");
  index = 0;
  for (int i = startBoundary; i <= endBoundary; i += increment) {
    double blockSize = block_size_list_last_year[index] / (double) totalRuns;
    double ratio = ratio_size_list_last_year[index] / (double) totalRuns;
    System.out.println(i + " " + blockSize + " " + ratio);
    index++;
  }
  System.out.println("Printing two year");
  index = 0;
  for (int i = startBoundary; i <= endBoundary; i += increment) {
    double blockSize = block_size_list_two_year[index] / (double) totalRuns;
    double ratio = ratio_size_list_two_year[index] / (double) totalRuns;
    System.out.println(i + " " + blockSize + " " + ratio);
    index++;
  }
}
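The six-argument startCDC overload invoked above is not shown in this section. A hedged sketch of its assumed shape, with parameter names and types guessed from how the call sites build their arguments:

// Assumed signature only -- the real method runs one content-defined-chunking
// comparison and accumulates its block size and dedup ratio into the arrays.
private static void startCDC(
    double[] blockSizeTotals, // accumulated block sizes, one slot per window setting
    double[] ratioTotals, // accumulated dedup ratios, one slot per window setting
    byte[] currentFile, // bytes of the current snapshot (element type assumed)
    byte[] olderFile, // bytes of the older snapshot
    List<Long> currentHashes, // chunk hashes of the current snapshot (assumed Long)
    List<Long> olderHashes) { // chunk hashes of the older snapshot
  // ... chunking and hash comparison elided in this sketch ...
}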
public void reset() {
  intTimeIdx = -1;
  alObjectStateArchive.clear();
}