Example #1
  @Before
  public void setUp() throws IOException { // create and populate the test input files
    try {

      String content = "{()}";
      String contentFormatted = "{\n ( ) \n}";

      File file = new File(fileName);

      // if the file doesn't exist, create it and write the content
      if (!file.exists()) {
        file.createNewFile();
        java.io.FileWriter fw = new java.io.FileWriter(file.getAbsoluteFile());
        BufferedWriter bw = new BufferedWriter(fw);
        bw.write(content);
        bw.close();
      }

      File fileOut = new File(fileNameOutR);
      // if fileOut doesn't exist, create it and write the formatted content
      if (!fileOut.exists()) {
        fileOut.createNewFile();
        java.io.FileWriter fwOut = new java.io.FileWriter(fileOut.getAbsoluteFile());
        BufferedWriter bwOut = new BufferedWriter(fwOut);
        bwOut.write(contentFormatted);
        bwOut.close();
      }

    } catch (IOException e) {
      e.printStackTrace();
    }
  }
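
For comparison, the same setup can be sketched with try-with-resources, which closes the writer even when write() throws; FileWriter also creates the file itself, so the explicit createNewFile() call is not needed. This is only a sketch and assumes the same fileName/fileNameOutR fields and content strings as above.

  // Minimal sketch (not the original test code): create the file and write its content
  // in one step; try-with-resources guarantees the writer is closed.
  private static void writeIfMissing(String path, String text) throws IOException {
    File file = new File(path);
    if (!file.exists()) {
      try (BufferedWriter bw = new BufferedWriter(new java.io.FileWriter(file))) {
        bw.write(text);
      }
    }
  }

setUp() could then reduce to writeIfMissing(fileName, content) and writeIfMissing(fileNameOutR, contentFormatted).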
  @Test
  public void testRedirectionCycle() throws IOException, XMLStreamException {
    // source dump
    File tmpSrcDump = File.createTempFile("wiki-src-dump", "xml");
    File tmpTargetDump = File.createTempFile("wiki-target-dump", "xml");

    // base structure - "<mediawiki><page><title></title><id></id></page></mediawiki>"
    BufferedWriter bw = new BufferedWriter(new FileWriter(tmpSrcDump));
    bw.write(
        "<mediawiki>"
            + "<page>"
            + "<title>Test1</title>"
            + "<id>1</id>"
            + "</page>"
            + "<page>"
            + "<title>Test2</title>"
            + "<id>2</id>"
            + "</page>"
            + "<page>"
            + "<title>Test3</title>"
            + "<id>3</id>"
            + "</page>"
            + "</mediawiki>");
    bw.close();

    bw = new BufferedWriter(new FileWriter(tmpTargetDump));
    bw.write(
        "<mediawiki>"
            + "<page>"
            + "<title>Test1</title>"
            + "<id>1</id>"
            + "<redirect/>"
            + "<revision>"
            + "<id>1234556</id>"
            + "<text xml:space=\"preserve\">#REDIRECT [[Test3]] {{R from CamelCase}}</text>"
            + "</revision>"
            + "</page>"
            + "<page>"
            + "<title>NEW_Test2</title>"
            + "<id>2</id>"
            + "</page>"
            + "<page>"
            + "<title>Test3</title>"
            + "<id>3</id>"
            + "<redirect/>"
            + "<revision>"
            + "<id>1234556</id>"
            + "<text xml:space=\"preserve\">#REDIRECT [[Test1]] {{R from CamelCase}}</text>"
            + "</revision>"
            + "</page>"
            + "</mediawiki>");
    bw.close();
    Map<String, String> hshResults = WikipediaRevisionMapper.map(tmpSrcDump, tmpTargetDump);
    assertEquals(3, hshResults.size());
    // Test1 REDIRECTS TO Test3, but since it forms a cycle, Test1 is mapped to itself
    assertFalse(hshResults.get("Test1").equals("Test3"));

    tmpSrcDump.delete();
    tmpTargetDump.delete();
  }
Example #3
 /** Perform the mapping. */
 public void process() {
   for (ClassDescriptor cld : model.getClassDescriptors()) {
     String cldName = cld.getName();
     if (!"org.intermine.model.InterMineObject".equals(cldName)) {
       String pkg = TypeUtil.packageName(cldName);
       String cls = TypeUtil.unqualifiedName(cld.getName());
       String separator = File.separator;
        // Escape the Windows path separator
       if ("\\".equals(separator)) {
         separator = "\\\\";
       }
       File dir = new File(file, pkg.replaceAll("[.]", separator));
       dir.mkdirs();
       File path = new File(dir, cls + ".java");
       try {
         path.delete();
         BufferedWriter fos = new BufferedWriter(new FileWriter(path, true));
         fos.write(generate(cld, false));
         fos.close();
         if (cld.isInterface()) {
           path = new File(dir, cls + "Shadow.java");
           path.delete();
           fos = new BufferedWriter(new FileWriter(path, true));
           fos.write(generate(cld, true));
           fos.close();
         }
       } catch (IOException e) {
         throw new RuntimeException("Error creating java", e);
       }
     }
   }
 }
Example #4
  public static void toLibSVM(List<TrainingSample<BxZoneLabel>> trainingElements, String filePath)
      throws IOException {
    BufferedWriter svmDataFile = null;
    try {
      FileWriter fstream = new FileWriter(filePath);
      svmDataFile = new BufferedWriter(fstream);
      for (TrainingSample<BxZoneLabel> elem : trainingElements) {
        if (elem.getLabel() == null) {
          continue;
        }
        svmDataFile.write(String.valueOf(elem.getLabel().ordinal()));
        svmDataFile.write(" ");

        Integer featureCounter = 1;
        for (Double value : elem.getFeatureVector().getValues()) {
          StringBuilder sb = new StringBuilder();
          Formatter formatter = new Formatter(sb, Locale.US);
          formatter.format("%d:%.5f", featureCounter++, value);
          svmDataFile.write(sb.toString());
          svmDataFile.write(" ");
        }
        svmDataFile.write("\n");
      }
      svmDataFile.close();
    } catch (Exception e) {
      System.err.println("Error: " + e.getMessage());
      return;
    } finally {
      if (svmDataFile != null) {
        svmDataFile.close();
      }
    }

    System.out.println("Done.");
  }
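
Two details in the method above are easy to overlook: svmDataFile.close() runs both at the end of the try block and again in finally (harmless, since closing an already-closed BufferedWriter is a no-op), and a new Formatter is allocated for every feature value. A minimal sketch of the same libSVM line format with try-with-resources, using a hypothetical helper and plain arrays instead of the TrainingSample types; it assumes the usual java.io and java.util.Locale imports.

  // Sketch only: one "<label> <index>:<value> ..." line per sample; the writer is closed
  // automatically, even if formatting throws.
  static void writeLibSvmLines(String filePath, int[] labels, double[][] features)
      throws IOException {
    try (BufferedWriter out = new BufferedWriter(new FileWriter(filePath))) {
      for (int i = 0; i < labels.length; i++) {
        StringBuilder line = new StringBuilder().append(labels[i]);
        for (int j = 0; j < features[i].length; j++) {
          line.append(' ')
              .append(j + 1)
              .append(':')
              .append(String.format(Locale.US, "%.5f", features[i][j]));
        }
        out.write(line.toString());
        out.newLine();
      }
    }
  }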
  private void CriticalSection() {
    csCount++; // Count number of satisfied CS entry requests

    // Do some activity in the CS. Write to the log file.
    Date d = new Date();
    System.out.println("Node " + mSelfNodeID + " entering CS at " + d.getTime());
    try {
      File file = new File("cstest.txt");
      FileWriter fw = new FileWriter(file, true);
      BufferedWriter bw = new BufferedWriter(fw);
      bw.write(mSelfNodeID + "e");
      bw.close();
      Thread.sleep(1000);

    } catch (Exception e) {
      e.printStackTrace();
    }

    try {
      File file = new File("cstest.txt");
      FileWriter fw = new FileWriter(file, true);
      BufferedWriter bw = new BufferedWriter(fw);
      bw.write("\n" + mSelfNodeID + "x\n");
      bw.close();
    } catch (Exception e) {
      System.out.println(e);
    }
    System.out.println("Node " + mSelfNodeID + " exiting CS");
  }
 private void writeHistory(int historyNum) throws IOException {
   if (historyNum <= MAX_HISTORY_ENTRIES) {
     File historyFile = getHistoryFile(false);
     BufferedWriter writer = new BufferedWriter(new FileWriter(historyFile));
     try {
       for (String historyEntry : history) {
         writer.write(historyEntry);
         writer.newLine();
       }
     } finally {
       writer.flush();
       writer.close();
     }
   } else {
     File historyFile = getHistoryFile(false);
     BufferedWriter writer = new BufferedWriter(new FileWriter(historyFile));
     try {
       for (String historyEntry :
           history.subList(historyNum - MAX_HISTORY_ENTRIES - 1, historyNum - 1)) {
         writer.write(historyEntry);
         writer.newLine();
       }
     } finally {
       writer.flush();
       writer.close();
     }
   }
 }
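
The two branches above differ only in which slice of history is written; the file handling itself can also be delegated to java.nio.file.Files, which buffers, writes a line separator per entry, and closes the file. A sketch under the assumptions that history is a List<String>, getHistoryFile(false) returns a java.io.File, UTF-8 is an acceptable encoding (FileWriter above uses the platform default), and the java.nio.file and java.nio.charset imports are present.

  // Sketch only: pick the slice, then let Files.write handle buffering, newlines and close().
  private void writeHistorySketch(int historyNum) throws IOException {
    List<String> lines =
        historyNum <= MAX_HISTORY_ENTRIES
            ? history
            : history.subList(historyNum - MAX_HISTORY_ENTRIES - 1, historyNum - 1);
    Files.write(getHistoryFile(false).toPath(), lines, StandardCharsets.UTF_8);
  }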
  private void writeResults(List<ComboSoul> combs, String infos) {
    File out = new File("_out" + File.separator);
    out.mkdirs();
    if (combs == null || combs.size() < 1) return;
    try {
      long time = System.currentTimeMillis();
      BufferedWriter bw =
          new BufferedWriter(
              new OutputStreamWriter(
                  new FileOutputStream("_out" + File.separator + time + ".csv"), "UTF-8"));
      bw.write(ComboSoul.HEADER);
      bw.newLine();
      for (int i = 0; i < combs.size(); i++) {

        bw.write(i + ";" + combs.get(i));
        bw.newLine();
      }
      bw.close();

      if (!Main.NOINFO) {
        BufferedWriter bufferedWriter =
            new BufferedWriter(
                new OutputStreamWriter(
                    new FileOutputStream("_out" + File.separator + time + ".txt"), "UTF-8"));
        bufferedWriter.write(infos);
        bufferedWriter.close();
      }

    } catch (Exception e) {
      e.printStackTrace();
    }
  }
  private void generate(Integer iterations) {
    BufferedWriter writerWriter = null;
    BufferedWriter readerWriter = null;
    try {
      File outputDir = sendersFile.getParentFile();
      File writerOutputFile = new File(outputDir, "WriterAccessLog.txt");
      writerWriter = new BufferedWriter(new FileWriter(writerOutputFile));

      File readerOutputFile = new File(outputDir, "ReaderAccessLog.txt");
      readerWriter = new BufferedWriter(new FileWriter(readerOutputFile));

      for (int i = 0; i < iterations; i++) {
        String message = writerLogLine();
        writerWriter.write(message);
        writerWriter.newLine();
        readerWriter.write(readerLogLine());
        readerWriter.newLine();
      }
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    } finally {
      try {
        // either writer may still be null if its FileWriter constructor threw
        if (writerWriter != null) {
          writerWriter.close();
        }
        if (readerWriter != null) {
          readerWriter.close();
        }
      } catch (IOException e) {
        // ignore failures on close
      }
    }
  }
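
The finally block above needs null checks because either FileWriter constructor can fail before the corresponding writer is assigned. Declaring both writers in a try-with-resources header removes the explicit close handling entirely; a sketch assuming the same sendersFile field and the writerLogLine()/readerLogLine() helpers used above.

  // Sketch only: both writers are opened in the resource header and closed in reverse order,
  // even when an exception is thrown inside the loop.
  private void generateWithTryWithResources(Integer iterations) {
    File outputDir = sendersFile.getParentFile();
    try (BufferedWriter writerWriter =
            new BufferedWriter(new FileWriter(new File(outputDir, "WriterAccessLog.txt")));
        BufferedWriter readerWriter =
            new BufferedWriter(new FileWriter(new File(outputDir, "ReaderAccessLog.txt")))) {
      for (int i = 0; i < iterations; i++) {
        writerWriter.write(writerLogLine());
        writerWriter.newLine();
        readerWriter.write(readerLogLine());
        readerWriter.newLine();
      }
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }
  }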
  // generate node_dict.tsv & edge_dict.tsv
  public static void generateDictFile() throws IOException {
    BufferedReader br =
        new BufferedReader(new FileReader("/home/xusheng/starry/baike/entity.index"));
    BufferedWriter bw =
        new BufferedWriter(
            new FileWriter("/home/xusheng/pra/examples/graphs/baike/kb_svo" + "/node_dict.tsv"));
    String line;
    while ((line = br.readLine()) != null) {
      String[] spt = line.split("\t");
      bw.write(spt[1] + "\t" + spt[0] + "\n");
    }
    br.close();
    bw.close();

    Set<String> edges = new HashSet<>();
    File f = new File("/home/xusheng/starry/baike/infobox.triple");
    br = new BufferedReader(new InputStreamReader(new FileInputStream(f), "UTF-8"));
    f = new File("/home/xusheng/pra/examples/graphs/baike/kb_svo/edge_dict.tsv");
    bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f), "UTF-8"));
    while ((line = br.readLine()) != null) {
      String[] spt = line.split("\t");
      edges.add(spt[1]);
    }
    br.close();
    int cnt = 0;
    for (String edge : edges) {
      cnt++;
      bw.write(cnt + "\t" + edge + "\n");
    }
    bw.close();
    LogInfo.logs("Total number of edges: %d", edges.size());
  }
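
Note that the method above reads entity.index and writes node_dict.tsv with the platform default charset, but uses explicit UTF-8 for infobox.triple and edge_dict.tsv. If UTF-8 is the intended encoding everywhere, java.nio.file.Files gives both the reader and the writer an explicit charset; a sketch with hypothetical Path parameters instead of the hard-coded /home/xusheng paths, assuming the java.nio.file imports.

  // Sketch only: node_dict.tsv generation with explicit UTF-8 on both sides and
  // try-with-resources handling close().
  static void writeNodeDict(Path entityIndex, Path nodeDict) throws IOException {
    try (BufferedReader br = Files.newBufferedReader(entityIndex, StandardCharsets.UTF_8);
        BufferedWriter bw = Files.newBufferedWriter(nodeDict, StandardCharsets.UTF_8)) {
      String line;
      while ((line = br.readLine()) != null) {
        String[] spt = line.split("\t");
        bw.write(spt[1] + "\t" + spt[0]);
        bw.newLine();
      }
    }
  }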
Example #10
 public String command(final CommandSender sender, Command command, String[] args) {
   try {
     BufferedWriter banlist = new BufferedWriter(new FileWriter("banned-players.txt", true));
     for (String p : plugin.cache.keySet()) {
       for (BanInfo info : plugin.cache.get(p)) {
         if (info.getType() == BanType.BAN.getId()) {
           banlist.newLine();
           banlist.write(g(p, info.getAdmin(), info.getReason()));
         }
       }
     }
     banlist.close();
     BufferedWriter iplist = new BufferedWriter(new FileWriter("banned-ips.txt", true));
     for (String p : plugin.cacheIP.keySet()) {
       for (BanInfo info : plugin.cacheIP.get(p)) {
         if (info.getType() == BanType.IPBAN.getId()) {
           iplist.newLine();
           iplist.write(g(p, info.getAdmin(), info.getReason()));
         }
       }
     }
     iplist.close();
   } catch (IOException e) {
     String msg = ChatColor.translateAlternateColorCodes('&', lang.getString("Export.Failed"));
     if (plugin.getLog()) plugin.getLogger().severe(ChatColor.stripColor(msg));
     e.printStackTrace();
     return msg;
   }
   String msg = ChatColor.translateAlternateColorCodes('&', lang.getString("Export.Completed"));
   if (plugin.getLog()) plugin.getLogger().info(ChatColor.stripColor(msg));
   return msg;
 }
  @Before
  public void createTestFiles() {
    deleteTestFolders();

    try {
      File folder = new File(TEST_DIR_IN);
      folder.mkdir();
      folder.createNewFile();

      // create a non-empty test file
      File f = new File(TEST_PATH_1);
      f.createNewFile();
      FileWriter fstream = new FileWriter(f);
      BufferedWriter out = new BufferedWriter(fstream);
      out.write(".");
      out.close();

      // create a non-empty test file in a subfolder
      File subDir = new File(TEST_DIR_IN + "/sub");
      subDir.mkdirs();

      File f2 = new File(TEST_PATH_2);
      BufferedWriter out2 = new BufferedWriter(new FileWriter(f2));
      out2.write("...");
      out2.close();
      f2.createNewFile();
    } catch (IOException e) {
      e.printStackTrace();
      fail();
    }
  }
 private void logCompitationProblem(
     String entry, String compilationProblemMessage, ErrorType errorToCheck) {
   try {
     String[] columns = entry.split(";");
     String revision = columns[0];
     String classFile = columns[1];
     if (errorToCheck == ErrorType.IMPORT_ERROR) {
       File file = new File("results/log_parser_import_issue.csv");
       FileWriter fw = new FileWriter(file, true);
       BufferedWriter bw = new BufferedWriter(fw);
       bw.write(revision + ";" + classFile + ";" + compilationProblemMessage);
       bw.newLine();
       bw.close();
       fw.close();
     } else if (errorToCheck == ErrorType.DUPLICATED_ERROR) {
       File file = new File("results/log_parser_duplicated_issue.csv");
       FileWriter fw = new FileWriter(file, true);
       BufferedWriter bw = new BufferedWriter(fw);
       bw.write(revision + ";" + classFile + ";" + compilationProblemMessage);
       bw.newLine();
       bw.close();
       fw.close();
     }
   } catch (IOException e) {
     e.printStackTrace();
   }
 }
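
The two branches above differ only in the target CSV file, and the extra fw.close() is redundant because BufferedWriter.close() already closes the wrapped FileWriter. A minimal sketch of a shared helper (hypothetical name appendCsvLine) that keeps the append-mode behaviour.

  // Sketch only: append one "revision;classFile;message" row and close the writer.
  private static void appendCsvLine(File file, String revision, String classFile, String message)
      throws IOException {
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(file, true))) {
      bw.write(revision + ";" + classFile + ";" + message);
      bw.newLine();
    }
  }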
  /**
   * @param args
   * @throws Exception
   */
  public static void main(String[] args) throws Exception {
    System.out.println("args: " + args.length);
    if (args.length > 0) {
      NUM_APPLIANCES = Integer.parseInt(args[0]);
      if (args.length > 1 && args[1] != null) NUM_SERVICES = Integer.parseInt(args[1]);
      if (args.length > 2 && args[2] != null) NUM_PROVIDERS = Integer.parseInt(args[2]);
      if (args.length > 3 && args[3] != null) NUM_COMPONENTS = Integer.parseInt(args[3]);
      if (args.length > 4 && args[4] != null) STEP_SIZE = Integer.parseInt(args[4]);
      if (args.length > 5 && args[5] != null) COMPARISON = Boolean.parseBoolean(args[5]);
    }

    for (int i = 0; i < NUM_PROVIDERS; i++) {
      Provider p = new Provider("provider-" + i);
      p.getAttributes()
          .add(
              new Attribute<Double>(
                  EProviderAttribute.NETWORK_COST_RECIEVE, 100 + Math.random() * 100));
      p.getAttributes()
          .add(
              new Attribute<Double>(
                  EProviderAttribute.NETWORK_COST_SEND, 100 + Math.random() * 100));
      p.getAttributes()
          .add(
              new Attribute<Double>(
                  EProviderAttribute.INTERNET_COST_RECIEVE, 500 + Math.random() * 100));
      p.getAttributes()
          .add(
              new Attribute<Double>(
                  EProviderAttribute.INTERNET_COST_SEND, 500 + Math.random() * 100));
      providers.add(p);
    }

    // control logging output via the logging.properties file
    LogManager.getLogManager()
        .readConfiguration(new FileInputStream(new File("logging.properties")));

    FileWriter fw_c = new FileWriter("out_components.txt");
    BufferedWriter out_c = new BufferedWriter(fw_c);
    out_c.write(
        "Component,Components,Number of AMIs,Number of Services,AMIs Model Creation (ms),AMIs Evaluation (ms),Services Model Creation (ms),Services Evaluation (ms),Combination Model Creation (ms),Combination Evaluation (ms),Total (ms)\n");
    out_c.close();
    FileWriter fw_f = new FileWriter("out_formation.txt");
    BufferedWriter out_f = new BufferedWriter(fw_f);
    out_f.write(
        "Components,Number of AMIs,Number of Services,Formation Solutions Model Creation (ms),Formation Solutions Evaluation (ms),Total (ms), GA vs. Full\n");
    out_f.close();

    System.out.println(
        "Parameters:"
            + ", appliances="
            + NUM_APPLIANCES
            + ", services="
            + NUM_SERVICES
            + ", providers="
            + NUM_PROVIDERS
            + ", components="
            + NUM_COMPONENTS);

    for (int i = 1; i <= NUM_COMPONENTS; i++) computeSolution(i);
  }
 @Override
 public void exportNetwork(String filePrefix, Properties props) throws IOException {
   BufferedWriter networkData = null;
   BufferedWriter nodeData = null;
   try {
     networkData = new BufferedWriter(new FileWriter(filePrefix + ".sif"));
     nodeData = new BufferedWriter(new FileWriter(filePrefix + ".node.txt"));
     String conceptGraphName = props.getProperty("ytex.conceptGraphName");
     exportNetwork(
         this.conceptDao.getConceptGraph(conceptGraphName),
         props.getProperty("ytex.corpusName"),
         conceptGraphName,
         props.getProperty("ytex.conceptSetName"),
         0,
         networkData,
         nodeData);
   } finally {
     if (networkData != null) {
       networkData.close();
     }
     if (nodeData != null) {
       nodeData.close();
     }
   }
 }
Example #15
 public static void write(StringBuffer content) {
   try {
     // Create file
     FileWriter fstreamECF = new FileWriter(localjsECFFilePath);
     BufferedWriter outECF = new BufferedWriter(fstreamECF);
     outECF.write(content.toString());
     outECF.flush();
     // Close the output stream
     outECF.close();
     fstreamECF.close();
     FileWriter fstreamMAC = new FileWriter(localjsMACFilePath);
     BufferedWriter outMAC = new BufferedWriter(fstreamMAC);
     outMAC.write(content.toString());
     outMAC.flush();
     // Close the output stream
     outMAC.close();
     fstreamMAC.close();
     FileWriter fstreamDelayedLDAP = new FileWriter(localjsDelayedLDAPFilePath);
     BufferedWriter outDelayedLDAP = new BufferedWriter(fstreamDelayedLDAP);
     outDelayedLDAP.write(content.toString());
     outDelayedLDAP.flush();
     // Close the output stream
     outDelayedLDAP.close();
     fstreamDelayedLDAP.close();
   } catch (Exception e) {
     // Catch exception if any
     log.error("Error: " + e.getMessage());
   }
 }
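
The three blocks above are identical apart from the destination path, and each flush() is redundant because close() flushes first. A sketch of the same behaviour as a loop over the configured paths, assuming the localjsECFFilePath/localjsMACFilePath/localjsDelayedLDAPFilePath constants and the log field from the example.

  // Sketch only: write the same content to every configured path; close() also flushes.
  public static void writeAll(StringBuffer content) {
    String[] paths = {localjsECFFilePath, localjsMACFilePath, localjsDelayedLDAPFilePath};
    for (String path : paths) {
      try (BufferedWriter out = new BufferedWriter(new FileWriter(path))) {
        out.write(content.toString());
      } catch (Exception e) {
        log.error("Error: " + e.getMessage());
      }
    }
  }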
Example #16
 /*public synchronized void generateKeys() {
     BigInteger p = new BigInteger(MillerRabin.ZnajdzPierwsza(bitlen/2));
     BigInteger q = new BigInteger(MillerRabin.ZnajdzPierwsza(bitlen/2));
     n = p.multiply(q);
     BigInteger m = (p.subtract(BigInteger.ONE)).multiply(q
         .subtract(BigInteger.ONE));
     e = new BigInteger(2^(bits-2),rnd).shiftLeft(1).add(new BigInteger("1"));
     while (m.gcd(e).intValue() > 1) {
       e = e.add(new BigInteger("2"));
     }
     d = e.modInverse(m);
   }
 */
 public static void odczytPlikuTekstowegoE(String nazwa) throws IOException {
   RSA rsa = new RSA(1024);
   BigInteger n = rsa.n;
   BigInteger d = rsa.d;
   BigInteger e = rsa.e;
   BufferedWriter writer = new BufferedWriter(new FileWriter("zaszyfrowany.txt"));
   BufferedWriter writern = new BufferedWriter(new FileWriter("n.txt"));
   BufferedWriter writerd = new BufferedWriter(new FileWriter("d.txt"));
    writern.write(n.toString());
    writerd.write(d.toString());
   writern.close();
   writerd.close();
   FileInputStream fileInput = new FileInputStream(nazwa);
   int r;
   BigInteger bigIntText = new BigInteger("1");
   String text = "";
   while ((r = fileInput.read()) != -1) {
     // char c = (char) r;
     text += (char) r;
     bigIntText = new BigInteger(text.getBytes());
     if (bigIntText.compareTo(n) > 0) {
       text = text.substring(0, text.length() - 1);
       bigIntText = new BigInteger(text.getBytes());
       BigInteger wynik = bigIntText.modPow(e, n);
        writer.write(wynik.toString());
       writer.write(" ");
       text = "" + (char) r;
     }
   }
   BigInteger wynik = bigIntText.modPow(e, n);
    writer.write(wynik.toString());
   writer.close();
   fileInput.close();
 }
  /**
   * @param graphDBNameOld
   * @param graphDBName
   * @throws IOException
   * @throws ParseException
   */
  public static void pruningUnconnected(String graphDBNameOld, String graphDBName)
      throws IOException, ParseException {
    System.out.println("In Pruning Unconnected");
    SmilesParser sParser = new SmilesParser();
    // InputStream file = new FileInputStream(graphDBNameOld);
    // Graph[] graphs = sParser.parse(file, MyFactory.getGraphFactory());
    BufferedReader bin = new BufferedReader(new FileReader(graphDBNameOld));

    LinkedList<Graph> graphlists = new LinkedList<Graph>();
    String line;
    while ((line = bin.readLine()) != null) {
      int pos = line.indexOf(" => ");
      try {
        graphlists.add(
            sParser.parse(
                line.substring(pos + " => ".length()),
                line.substring(0, pos),
                MyFactory.getGraphFactory()));
        System.out.println("GraphID: " + line.substring(0, pos));
      } catch (ParseException e) {
        System.out.println(line);
        e.printStackTrace();
      }
    }

    Graph[] graphs = new Graph[graphlists.size()];
    graphlists.toArray(graphs);
    System.out.println("After Parsing all Graphs");
    int connect = 0;
    BufferedWriter indexWriter = new BufferedWriter(new FileWriter(graphDBName));
    for (int i = 0; i < graphs.length; i++) {
      if (GraphConnectivityTester.isConnected(graphs[i])) {
        StringBuffer buf = new StringBuffer();
        buf.append(connect);
        buf.append(" => ");
        buf.append(sParser.serialize(graphs[i]));
        buf.append("\n");
        indexWriter.write(buf.toString());
        connect++;
      }
    }
    indexWriter.close();
    // Write the meta information of the SMILES data file:
    BufferedWriter metaWriter = new BufferedWriter(new FileWriter(graphDBName + "_Meta"));
    // 1. Processing Date
    SimpleDateFormat bartDateFormat = new SimpleDateFormat("EEEE-MMMM-dd-yyyy");
    Date date = new Date();
    metaWriter.write(bartDateFormat.format(date));
    metaWriter.newLine();
    // 2. Number of graphs in this file
    metaWriter.write("Number of Graphs:" + connect);
    // Close meta data file
    try {
      metaWriter.close();
      bin.close();
    } catch (IOException e) {
      e.printStackTrace();
    }
    System.out.println("TOTAL Graph: " + graphs.length + "Connected : " + connect);
  }
  public static void updateSave(String saveLocation, Player p, GameBoard currentMap)
      throws IOException {
    File playerFile = new File(saveLocation + "/player.txt");
    BufferedWriter w = new BufferedWriter(new FileWriter(playerFile));

    Save.savePlayerProfession(w, p.getProfession());
    Save.savePlayerStatus(w, p);
    w.close();

    BufferedWriter r = new BufferedWriter(new FileWriter(saveLocation + "/" + currentMap.mapID));
    Save.saveBoard(currentMap, r);
    Save.saveEntities(currentMap, r);
    r.close();

    File director = new File(saveLocation + "/saveInformation.txt");
    BufferedReader a = new BufferedReader(new FileReader(director));
    String name = a.readLine();
    a.close();

    BufferedWriter b = new BufferedWriter(new FileWriter(director));
    b.write(name);
    b.newLine();
    b.write(currentMap.mapID);
    b.close();
  }
  @SuppressWarnings("ResultOfMethodCallIgnored")
  @Test
  public void testUnchangedPageEntries() throws IOException, XMLStreamException {
    // source dump
    File tmpSrcDump = File.createTempFile("wiki-src-dump", "xml");
    File tmpTargetDump = File.createTempFile("wiki-target-dump", "xml");

    // base structure - "<mediawiki><page><title></title><id></id></page></mediawiki>"
    BufferedWriter bw = new BufferedWriter(new FileWriter(tmpSrcDump));
    bw.write(
        "<mediawiki><page><title>Test1</title><id>1</id></page><page><title>Test2</title><id>2</id></page></mediawiki>");
    bw.close();

    bw = new BufferedWriter(new FileWriter(tmpTargetDump));
    bw.write(
        "<mediawiki><page><title>Test1</title><id>1</id></page><page><title>Test2</title><id>2</id></page></mediawiki>");
    bw.close();

    // default diff will also include all unchanged entries
    Map<String, String> hshResults = WikipediaRevisionMapper.map(tmpSrcDump, tmpTargetDump);
    assertEquals(2, hshResults.size());

    // setting the flag to false will exclude unchanged entries
    hshResults = WikipediaRevisionMapper.map(tmpSrcDump, tmpTargetDump, false);
    assertEquals(0, hshResults.size());
    // remove tmp files
    tmpSrcDump.delete();
    tmpTargetDump.delete();
  }
Example #20
  public void saveLogData() {

    Long tsLong = System.currentTimeMillis() / 1000;
    String ts = tsLong.toString();

    File mydir = new File(Environment.getExternalStorageDirectory() + "/Ajna/", "Logs");
    if (!mydir.exists()) mydir.mkdirs();

    File log28File = new File(mydir, ts + "LOG-28.csv");
    File log26File = new File(mydir, ts + "LOG-26.csv");
    // File log27File = new File(mydir, ts + "LOG-27.csv");

    try {
      FileWriter fw = new FileWriter(log28File.getAbsoluteFile());
      BufferedWriter bw = new BufferedWriter(fw);

      /*
                  StringBuilder readableLog28 = new StringBuilder();
                  StringBuilder output = removeBlankSpace(log28);


                  for (int i = 0; i < log28.length(); i += 2) {
                      String str = log28.substring(i, i + 2);
                      readableLog28.append((char) Integer.parseInt(str, 16));
                  }
      */

      // bw.write(readableLog28.toString());
      bw.write(log28.toString());
      bw.close();

      Toast.makeText(this, "Log 28 Captured", Toast.LENGTH_SHORT).show();
    } catch (IOException e) {
      e.printStackTrace();
    }

    try {
      FileWriter fw = new FileWriter(log26File.getAbsoluteFile());
      BufferedWriter bw = new BufferedWriter(fw);
      bw.write(log26.toString());
      bw.close();

      Toast.makeText(this, "Log 26 Captured", Toast.LENGTH_SHORT).show();
    } catch (IOException e) {
      e.printStackTrace();
    }

    write27LogData(mydir, ts, "LOG-27");

    log26.setLength(0);
    log27_a.setLength(0);
    log27_g.setLength(0);
    log27_q.setLength(0);
    log27_e.setLength(0);
    log27_h.setLength(0);

    log27.setLength(0);
    log28.setLength(0);
  }
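
The two try blocks above differ only in the target file and the Toast text. A sketch of a small helper (hypothetical name writeLog) that the method could call for both logs; it assumes the same log26/log28 StringBuilder fields as above.

  // Sketch only: write one log buffer to a file; try-with-resources closes the writer.
  private boolean writeLog(File target, StringBuilder data) {
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(target))) {
      bw.write(data.toString());
      return true;
    } catch (IOException e) {
      e.printStackTrace();
      return false;
    }
  }

saveLogData() would then show each "Log ... Captured" Toast only when writeLog(...) returns true.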
 @Override
 protected void close() throws IOException {
   super.close();
   writerA.close();
   if (writerB != null) writerB.close();
   writerA = null;
   writerB = null;
 }
  public void writeRDataForBoxPlot(
      final String outputFolder, final boolean isWritingDataForEachTimeInterval) {
    if (!new File(outputFolder + "/boxPlot/").exists())
      new File(outputFolder + "/boxPlot/").mkdirs();

    SortedMap<String, SortedMap<Double, Map<Id<Person>, Double>>> userGrp2PersonToll =
        handler.getUserGrp2TimeBin2Person2Toll();

    for (String ug : userGrp2PersonToll.keySet()) {

      if (!isWritingDataForEachTimeInterval) {
        LOG.info(
            "Writing toll/trip for whole day for each user group. This data is likely to be suitable for box plot in R.");
        BufferedWriter writer =
            IOUtils.getBufferedWriter(outputFolder + "/boxPlot/toll_" + ug.toString() + ".txt");
        try {
          // sum all the values for different time bins
          Map<Id<Person>, Double> personToll = new HashMap<Id<Person>, Double>();
          for (double d : userGrp2PersonToll.get(ug).keySet()) {
            for (Id<Person> person : userGrp2PersonToll.get(ug).get(d).keySet()) {
              if (personToll.containsKey(person))
                personToll.put(
                    person, personToll.get(person) + userGrp2PersonToll.get(ug).get(d).get(person));
              else personToll.put(person, userGrp2PersonToll.get(ug).get(d).get(person));
            }
          }

          for (Id<Person> id : personToll.keySet()) {
            writer.write(personToll.get(id) + "\n");
          }
          writer.close();
        } catch (Exception e) {
          throw new RuntimeException("Data is not written in file. Reason: " + e);
        }
      } else {
        LOG.warn(
            "Writing toll/trip for each time bin and for each user group. Thus, this will write many files for each user group. This data is likely to be suitable for box plot in R. ");
        try {
          for (double d : userGrp2PersonToll.get(ug).keySet()) {
            BufferedWriter writer =
                IOUtils.getBufferedWriter(
                    outputFolder
                        + "/boxPlot/toll_"
                        + ug.toString()
                        + "_"
                        + ((int) d / 3600 + 1)
                        + "h.txt");
            for (Id<Person> person : userGrp2PersonToll.get(ug).get(d).keySet()) {
              writer.write(userGrp2PersonToll.get(ug).get(d).get(person) + "\n");
            }
            writer.close();
          }
        } catch (Exception e) {
          throw new RuntimeException("Data is not written in file. Reason: " + e);
        }
      }
    }
  }
  @SuppressWarnings("unchecked")
  public void dumpCoverageAndConceptVectorAll(
      String fileEntityMI, String fileEntityEntropy, String fileEntityFreq, String fileCoverage) {
    BufferedWriter bwFileCoverage;

    HashMap<String, Double> entityMIMap = new GetEntityMI().getEntityMI(fileEntityMI);

    HashMap<String, Double> entityEntropyMap = new GetEntityMI().getEntityMI(fileEntityEntropy);

    HashMap<String, Double> entityFreqMap = new GetEntityFreq().getEntityFreq(fileEntityFreq);
    if (entityMIMap == null) {
      try {
        bwFileCoverage = new BufferedWriter(new FileWriter(fileCoverage));
        bwFileCoverage.close();
      } catch (Exception e) {
        e.printStackTrace();
      }
      return;
    }

    conceptCoverageMap = new HashMap<>();
    Vector<Integer> conceptList = new Vector<>();

    try {
      for (Integer id : ProbaseData.conceptEntitySetMap.keySet()) {
        double coverage =
            getConceptCoverage(
                ProbaseData.idTermMap.get(id), entityMIMap, entityEntropyMap, entityFreqMap);
        if (coverage != 0) {
          conceptCoverageMap.put(id, coverage);
          conceptList.add(id);
        }
      }
    } catch (Exception e) {
      e.printStackTrace();
    }

    System.setProperty("java.util.Arrays.useLegacyMergeSort", "true");
    Collections.sort(conceptList, new cmp());
    try {
      bwFileCoverage = new BufferedWriter(new FileWriter(fileCoverage));

      for (Integer id : conceptList) {
        bwFileCoverage.write(
            String.valueOf(id)
                + "\t"
                + ProbaseData.idTermMap.get(id)
                + "\t"
                + String.valueOf(conceptCoverageMap.get(id)));
        bwFileCoverage.newLine();
        bwFileCoverage.flush();
      }
      bwFileCoverage.flush();
      bwFileCoverage.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
  /**
   * Connect vertices in the graph. Connects vertices if startVertex and endVertex may be connected
   * by a directed edge. Ensures that:
   *
   * <ul>
   *   <li>startVertex is a RequiredPort and endVertex is a ProvidedPort
   *   <li>the required port has not yet reached the maximal number of connections {@link
   *       RequiredPort#getMaxDegree() getMaxDegree}
   *   <li>the ports do not belong to the same component
   *   <li>the ports are not yet connected
   * </ul>
   *
   * @param startVertex
   * @param endVertex
   * @return true if the two vertices were connected
   */
  protected boolean connect(NubiSaveVertex startVertex, NubiSaveVertex endVertex) {
    boolean shouldNotConnect = shouldNotConnect(startVertex, endVertex);
    System.out.println("nubisavecomponentgraphmouseplugin: connect");
    if (shouldNotConnect) {
      System.out.println("nubisavecomponentgraphmouseplugin: returning false");
      return false;
    }
    AbstractNubisaveComponent start =
        (AbstractNubisaveComponent) ((RequiredPort) startVertex).getParentComponent();
    AbstractNubisaveComponent end =
        (AbstractNubisaveComponent) ((ProvidedPort) endVertex).getParentComponent();
    if (!isConnected(startVertex, endVertex)) {
      BufferedWriter writer = null;
      try {
        System.out.println("is not connected");
        start.connectToProvidedPort(end);
        WeightedNubisaveVertexEdge edge = edgeFactory.create();
        edge.setWeight(end.getNrOfFilePartsToStore());
        graph.addEdge(edge, startVertex, endVertex, EdgeType.DIRECTED);
        File file = new File(storage_directory + "/" + "connections.txt");
        if (!file.exists()) {
          file.createNewFile();
        }
        writer = new BufferedWriter(new FileWriter(file, true));
        writer.write(start.getUniqueName());
        writer.write(" ");
        writer.write(end.getUniqueName());
        writer.newLine();
        writer.close();
      } catch (IOException ex) {
        Logger.getLogger(AbstractNubisaveComponentEdgeCreator.class.getName())
            .log(Level.SEVERE, null, ex);
      } finally {
        try {
          // writer is still null if the IOException happened before it was created
          if (writer != null) {
            writer.close();
          }
        } catch (IOException ex) {
          Logger.getLogger(AbstractNubisaveComponentEdgeCreator.class.getName())
              .log(Level.SEVERE, null, ex);
        }
      }

    } else {
      System.out.println("is  connected --> increase weight");
      WeightedNubisaveVertexEdge edge =
          (WeightedNubisaveVertexEdge) graph.findEdge(startVertex, endVertex);
      System.out.println("edge weight: " + edge.getWeight());
      int before = end.getNrOfFilePartsToStore();
      System.out.println("nroffileparts1: " + before);
      end.setNrOfFilePartsToStore(end.getNrOfFilePartsToStore() + 1);
      System.out.println("nroffileparts2: " + end.getNrOfFilePartsToStore());
      assert (end.getNrOfFilePartsToStore() - 1) == before;
      edge.setWeight(end.getNrOfFilePartsToStore());
      assert (edge.getWeight() - 1) == before;
      System.out.println("incrreased edge weight: " + edge.getWeight());
    }
    System.out.println("nubisavecomponentgraphmouseplugin: returning true");
    return true;
  }
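
The explicit close() in the finally block above is only needed because the writer is created inside the try, and on the success path close() runs twice. With try-with-resources the append can be isolated in a small helper (hypothetical name appendConnection, same storage_directory field) and the writer is closed exactly once; a sketch of the file-append portion only.

  // Sketch only: append one connection record; FileWriter(file, true) creates the file
  // if necessary, so no createNewFile() call is needed.
  private void appendConnection(AbstractNubisaveComponent start, AbstractNubisaveComponent end) {
    File file = new File(storage_directory + "/" + "connections.txt");
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(file, true))) {
      writer.write(start.getUniqueName() + " " + end.getUniqueName());
      writer.newLine();
    } catch (IOException ex) {
      Logger.getLogger(AbstractNubisaveComponentEdgeCreator.class.getName())
          .log(Level.SEVERE, null, ex);
    }
  }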
  @Test
  public void testSinglePageRedirection() throws IOException, XMLStreamException {
    // source dump
    File tmpOldDump = File.createTempFile("wiki-src-dump", "xml");
    File tmpNewDump = File.createTempFile("wiki-target-dump", "xml");

    // base structure - "<mediawiki><page><title></title><id></id></page></mediawiki>"
    BufferedWriter bw = new BufferedWriter(new FileWriter(tmpOldDump));
    bw.write(
        "<mediawiki>"
            + "<page>"
            + "<title>Test1</title>"
            + "<id>1</id>"
            + "<revision>"
            + "<id>1234556</id>"
            + "<text xml:space=\"preserve\">This is entry about Test1 and it will be redirected.</text>"
            + "</revision>"
            + "</page>"
            + "<page>"
            + "<title>Test2</title>"
            + "<id>2</id>"
            + "</page>"
            + "<page>"
            + "<title>Test3</title>"
            + "<id>3</id>"
            + "</page>"
            + "</mediawiki>");
    bw.close();

    bw = new BufferedWriter(new FileWriter(tmpNewDump));
    bw.write(
        "<mediawiki>"
            + "<page>"
            + "<title>Test1</title>"
            + "<id>1</id>"
            + "<redirect/>"
            + "<revision>"
            + "<id>1234556</id>"
            + "<text xml:space=\"preserve\">#REDIRECT [[Test3]] {{R from CamelCase}}</text>"
            + "</revision>"
            + "</page>"
            + "<page>"
            + "<title>NEW_Test2</title>"
            + "<id>2</id>"
            + "</page>"
            + "<page>"
            + "<title>Test3</title>"
            + "<id>3</id>"
            + "</page>"
            + "</mediawiki>");
    bw.close();
    Map<String, String> hshResults = WikipediaRevisionMapper.map(tmpOldDump, tmpNewDump);
    assertEquals(3, hshResults.size());
    assertEquals("Test3", hshResults.get("Test1"));

    tmpNewDump.delete();
    tmpOldDump.delete();
  }
Example #26
 @Override
 public void closeFiles() {
   try {
     if (objectFile != null) objectFile.close();
     if (subjectFile != null) subjectFile.close();
   } catch (Exception ex) {
     ex.printStackTrace();
   }
 }
Example #27
  public String run(String dir, OUTPUT output) throws Exception {
    String filename = dir + "/alignment";

    FastaReader<DNASequence, NucleotideCompound> fastaReader =
        new FastaReader<DNASequence, NucleotideCompound>(
            new File(filename),
            new GenericFastaHeaderParser<DNASequence, NucleotideCompound>(),
            new DNASequenceCreator(AmbiguityDNACompoundSet.getDNACompoundSet()));

    // fastaReader.process returns a LinkedHashMap, so the order of sequences is preserved
    Map<String, DNASequence> fasta = fastaReader.process();

    // Write out the names of the sequences - we rename them to reroot the sequences through RAxML
    // and PAML
    BufferedWriter names_out = new BufferedWriter(new FileWriter(filename + ".names"));
    for (Map.Entry<String, DNASequence> e : fasta.entrySet())
      names_out.write(String.format("%s\n", e.getKey()));
    names_out.close();

    // Output a PHYLIP file, which is the accepted format for both RAxML and PAML
    // However, the PAML file for baseml requires a 'GC' in the header, which makes RAxML error out

    String outfilename;
    String header = fasta.size() + " " + fasta.values().iterator().next().getLength();
    if (output == OUTPUT.PAML) {
      outfilename = filename + ".paml.phylip";
      header = header + " GC";
    } else {
      outfilename = filename + ".raxml.phylip";
    }

    try {
      BufferedWriter phylip_out = new BufferedWriter(new FileWriter(outfilename));
      phylip_out.write(header + "\n");
      int sequenceCount = 1;

      for (Map.Entry<String, DNASequence> e : fasta.entrySet()) {
        phylip_out.write(
            String.format(
                "seq_%s    %s\n",
                sequenceCount++, e.getValue().getSequenceAsString().toUpperCase()));
      }

      /*
      for (String name : fasta.keySet()) {
          phylip_out.write(String.format("seq_%s    %s\n", sequenceCount++, fasta.get(name).toString().toUpperCase()));
      }*/

      phylip_out.close();
    } catch (Exception e) {
      e.printStackTrace();
      throw new RuntimeException(e);
    }
    return String.format(
        "Alignment has %s sequences, each with %s sites.",
        fasta.size(), fasta.values().iterator().next().getLength());
  }
Example #28
  public void innerExecute() throws IOException, JerializerException {
    // first arg - schema dir, second arg - dest dir
    // TODO write schemas

    File schemas = new File(schemaDir);
    assert schemas.isDirectory() && schemas.canRead();

    File dest = new File(destDir);
    assert !dest.exists();
    dest.mkdir();
    assert dest.isDirectory() && dest.canWrite();

    Set<Klass> genKlasses = new TreeSet<Klass>();

    JsonParser parser = new JsonParser();
    for (File schema : schemas.listFiles()) {
      BufferedReader reader = new BufferedReader(new FileReader(schema));
      JThing thing = parser.parse(reader);
      System.out.println(thing);
      String rootString = schemas.toURI().toString();
      if (!rootString.endsWith("/")) rootString = rootString + "/";
      String klassName =
          KlassContext.capitalize(schema.toURI().toString().substring(rootString.length()));
      String packageName = basePackage + "." + klassName.toLowerCase();

      GenWritable writable = parseSchemaThing(klassName, packageName, thing);

      Map<String, String> m = writable.makeClassToTextMap();

      final File dir = new File(destDir + "/" + klassName.toLowerCase());
      dir.mkdirs();
      for (Map.Entry<String, String> entry : m.entrySet()) {
        final String fullClass = entry.getKey();
        final String contents = entry.getValue();
        final String relName = fullClass.substring(fullClass.lastIndexOf(".") + 1) + ".java";
        final File f = new File(dir, relName);
        FileWriter writer = new FileWriter(f);
        BufferedWriter bufferedWriter = new BufferedWriter(writer);
        bufferedWriter.write(contents, 0, contents.length());
        bufferedWriter.close();
      }

      genKlasses.add(new Klass(klassName, packageName));
    }

    RegistryGen registryGen =
        new RegistryGen(new Klass("GenschemaRegistryFactory", basePackage), genKlasses);
    final File g = new File(destDir + "/GenschemaRegistryFactory.java");
    for (Map.Entry<String, String> entry : registryGen.makeClassToTextMap().entrySet()) {
      final String contents = entry.getValue();
      FileWriter writer = new FileWriter(g);
      BufferedWriter bufferedWriter = new BufferedWriter(writer);
      bufferedWriter.write(contents, 0, contents.length());
      bufferedWriter.close();
      break;
    }
  }
  // save log to file and return warning messages
  private String logToFile() {
    if (getPackageManager().checkPermission(android.Manifest.permission.READ_LOGS, getPackageName())
        != 0) {
      return null;
    }

    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd.HH'h'mm'm'ss's'");
    String logfilename =
        AdPreviewer.ADPREVIEWER_DIR
            + this.getIntent().getExtras().getString("originalFilename")
            + "."
            + dateFormat.format(new Date());
    File logfile = new File(logfilename + ".log");
    File warningsLogfile = new File(logfilename + ".WARNING.log");
    String pidString = null;
    try {
      Log.i(CLASSTAG, "creating log file: " + logfilename);
      logfile.createNewFile();

      Process process = Runtime.getRuntime().exec("logcat -v time -d");
      BufferedReader bufferedReader =
          new BufferedReader(new InputStreamReader(process.getInputStream()));
      StringBuilder log = new StringBuilder();
      StringBuilder warningsLog = new StringBuilder();
      String line;
      int warningsCount = 0;
      while ((line = bufferedReader.readLine()) != null) {
        if (pidString == null && line.indexOf("AdContext") > 0) {
          pidString = line.substring(line.indexOf("("), line.indexOf(")") + 1);
        }
        if (pidString != null && line.indexOf(pidString) > 0) {
          log.append(line + "\n");
          if (line.indexOf("W/") >= 0 || line.indexOf("E/") >= 0) {
            warningsLog.append(line + "\n");
            warningsCount++;
          }
        }
      }

      BufferedWriter out = new BufferedWriter(new FileWriter(logfile));
      out.write(log.toString());
      out.close();

      if (warningsCount > 0) {
        warningsLogfile.createNewFile();
        BufferedWriter out2 = new BufferedWriter(new FileWriter(warningsLogfile));
        out2.write(warningsLog.toString());
        out2.close();
        return warningsLog.toString();
      }
    } catch (IOException e) {
      e.printStackTrace();
    }
    return null;
  }
  public static void beginParsing() throws IOException {
    recordMap = new HashMap<>();
    tableMap = new Maps().generateTableMap();
    File vaxFile = new File(FILE_PATH + File.separator + FILE_NAME);

    BufferedReader theFile = new BufferedReader(new FileReader(vaxFile));
    String line = "";
    int lineNum = 0;
    while ((line = theFile.readLine()) != null) {
      lineNum++;
      parseLine(lineNum, line);
    }

    // Close the BufferedReader.
    if (theFile != null) {
      theFile.close();
    }
    // Print map contents:
    writeToLog("\n--------------- PARSING FINISHED ---------------\n");

    // writeToFile("Record Map values: ");
    for (Entry<Integer, ArrayList<Record>> entry : recordMap.entrySet()) {
      // writeToFile("\nRecord type: " + entry.getKey());
      for (Record record : entry.getValue()) {
        /*writeToFile("\tType: " + record.getRecordType() + " Action: "
        + record.getActionCode() + " Ref: " + record.getReferenceId());*/
        // + " Record: " + record.getText());
        String action = "";
        switch (record.getActionCode()) {
          case 51:
            action = "INSERT";
            break;
          case 53:
            action = "UPDATE";
            break;
          case 54:
            action = "DELETE";
            break;
          default:
            action = "NOTRECOGNIZED";
            break;
        }
        writeToFile(
            tableMap.get(record.getRecordType()) + "," + action + "," + record.getReferenceId());
      }
    }

    // Close Buffered Writers.
    if (theLog != null) {
      theLog.close();
    }
    if (theOutputFile != null) {
      theOutputFile.close();
    }
  }