Example #1
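  // Joins the log-normalized OTU table for the given tissue with per-sample metadata
  // and writes the merged table; when onlyOnePerCage is true, only the first sample
  // encountered from each cage is written.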
  private static void writeFile(String tissue, boolean onlyOnePerCage) throws Exception {

    BufferedReader reader =
        new BufferedReader(
            new FileReader(
                new File(
                    ConfigReader.getRachSachReanalysisDir()
                        + File.separator
                        + "pcoa_otu_"
                        + tissue
                        + "_taxaAsColsLogNorm.txt")));

    BufferedWriter writer =
        new BufferedWriter(
            new FileWriter(
                new File(
                    ConfigReader.getRachSachReanalysisDir()
                        + File.separator
                        + (onlyOnePerCage
                            ? "pcoa_otu_Colon Content_taxaAsColsLogNormWithMetadataOnePerCage.txt"
                            : "pcoa_otu_"
                                + tissue
                                + "_taxaAsColsLogNormWithMetadataAllMice.txt"))));

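    // metadata columns come first, followed by the original header and abundance columns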
    writer.write("sample\tcondition\ttissue\tcage\t");
    writer.write(reader.readLine() + "\n");
    HashMap<String, String> cageMap = getCageMap();

    HashMap<String, MappingFileLine> map = MappingFileLine.getMap();
    HashSet<String> cages = new HashSet<String>();

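    // one input row per sample: column 0 holds the (quoted) sample id, the rest are taxa values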
    for (String s = reader.readLine(); s != null; s = reader.readLine()) {
      String[] splits = s.split("\t");
      MappingFileLine mfl = map.get(splits[0].replaceAll("\"", ""));
      String cage = cageMap.get(mfl.getRatID());

      if (!onlyOnePerCage || !cages.contains(cage)) {
        writer.write(splits[0].replaceAll("\"", "") + "\t");
        writer.write(mfl.getLine() + "\t");
        writer.write(mfl.getTissue() + "\t");
        writer.write(cage);

        for (int x = 1; x < splits.length; x++) writer.write("\t" + splits[x]);

        writer.write("\n");
        cages.add(cage);
      }
    }

    reader.close();
    writer.flush();
    writer.close();
  }
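
  // Reads MouseDonors/map.txt (skipping the header row) into an insertion-ordered map
  // keyed by sample id; a duplicate sample id is treated as an error.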
  public static HashMap<String, MetadataParserFileLine> parseMetadata() throws Exception {
    HashMap<String, MetadataParserFileLine> map =
        new LinkedHashMap<String, MetadataParserFileLine>();

    BufferedReader reader =
        new BufferedReader(
            new FileReader(
                new File(
                    ConfigReader.getBigDataScalingFactorsDir()
                        + File.separator
                        + "MouseDonors"
                        + File.separator
                        + "map.txt")));

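    // skip the header row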
    reader.readLine();

    for (String s = reader.readLine(); s != null; s = reader.readLine()) {
      MetadataParserFileLine mpfl = new MetadataParserFileLine(s);

      if (map.containsKey(mpfl.sample)) throw new Exception("Duplicate");

      map.put(mpfl.sample, mpfl);
    }

    reader.close();

    return map;
  }
Example #3
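  // Maps each OTU id (column 0) to its taxon name at the requested taxonomic level,
  // parsed from the semicolon-delimited lineage string in column 2.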
  public static HashMap<String, String> getTaxaMap(int level) throws Exception {
    HashMap<String, String> map = new HashMap<String, String>();

    BufferedReader reader =
        new BufferedReader(
            new FileReader(
                new File(
                    ConfigReader.getGoranTrialDir()
                        + File.separator
                        + "OTU_Front_DATA Serum Solar Sano 02-20-2015.txt")));

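    // skip the two header rows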
    reader.readLine();
    reader.readLine();

    for (String s = reader.readLine(); s != null; s = reader.readLine()) {
      String[] splits = s.split("\t");

      // System.out.println(splits[2]);

      if (splits[2].split(";").length - 1 >= level) {
        String familyString = splits[2].split(";")[level];
        familyString =
            new StringTokenizer(familyString, "(").nextToken().replaceAll("\"", "").trim();

        if (map.containsKey(splits[0])) throw new Exception("Duplicate key " + splits[0]);

        map.put(splits[0], familyString);
      }
    }

    reader.close();

    return map;
  }
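
  // For each level of the tree, pivots that level's nodes into an OTU table, normalizes
  // and log-transforms it, and records a one-way ANOVA p-value per taxon, grouping
  // samples by the factor returned by stripSuffix; the root entry keeps a p-value of 1.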
  public static HashMap<String, Double> getOneWayAnovaPValues(ENode root) throws Exception {
    HashMap<String, Double> returnMap = new HashMap<String, Double>();
    HashMap<Float, List<ENode>> map = ReadCluster.getMapByLevel(root);
    System.out.println(map.size());

    for (Float f : map.keySet()) {
      File outFile =
          new File(
              ConfigReader.getETreeTestDir()
                  + File.separator
                  + "Mel74ColumnsAsTaxaFor"
                  + f
                  + ".txt");
      System.out.println(outFile.getAbsolutePath());
      PivotOut.pivotOut(map.get(f), outFile.getAbsolutePath());
      OtuWrapper wrapper = new OtuWrapper(outFile);
      List<List<Double>> list = wrapper.getDataPointsNormalizedThenLogged();

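      // compute a p-value for every taxon at this level; the root entry keeps p = 1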
      for (int x = 0; x < wrapper.getOtuNames().size(); x++) {
        double pValue = 1;

        if (!wrapper.getOtuNames().get(x).equals(ETree.ROOT_NAME)) {
          List<Number> data = new ArrayList<Number>();
          List<String> factors = new ArrayList<String>();
          for (int y = 0; y < wrapper.getSampleNames().size(); y++) {
            if (!wrapper.getSampleNames().get(y).equals(ETree.ROOT_NAME)) {
              data.add(list.get(y).get(x));
              factors.add("" + stripSuffix(wrapper.getSampleNames().get(y)));
            }
          }

          OneWayAnova owa = new OneWayAnova(data, factors);

          // guard against the same taxon name appearing twice in the output map
          if (returnMap.containsKey(wrapper.getOtuNames().get(x))) throw new Exception("Duplicate");

          pValue = owa.getPValue();
        }

        returnMap.put(wrapper.getOtuNames().get(x), pValue);
      }
    }

    return returnMap;
  }
Example #5
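  // Reads TTULyteCages.txt (cage id in column 0, rat id in column 1) and returns
  // a map from rat id to cage id.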
  private static HashMap<String, String> getCageMap() throws Exception {
    HashMap<String, String> cageMap = new HashMap<String, String>();

    BufferedReader reader =
        new BufferedReader(
            new FileReader(
                new File(
                    ConfigReader.getRachSachReanalysisDir()
                        + File.separator
                        + "TTULyteCages.txt")));

    reader.readLine();
    for (String s = reader.readLine(); s != null; s = reader.readLine()) {
      String[] splits = s.split("\t");

      if (cageMap.containsKey(splits[1])) throw new Exception("Duplicate rat id " + splits[1]);

      cageMap.put(splits[1], splits[0]);
    }

    reader.close();

    return cageMap;
  }
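
  // Builds taxa-as-columns Kraken count tables at each taxonomic level, first for the
  // non-16S reports and then for the 16S reports, and writes a log-normalized copy of
  // each table. KRAKEN_LEVELS and RDP_LEVELS are assumed to be parallel arrays
  // (one output label per Kraken level).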
  public static void main(String[] args) throws Exception {
    File topDir = new File(ConfigReader.getVanderbiltDir() + File.separator + "krakenOut");

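    // first pass: pivot the non-16S Kraken reports at each taxonomic level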
    for (int x = 0; x < KRAKEN_LEVELS.length; x++) {
      HashMap<String, HashMap<String, Integer>> map =
          new HashMap<String, HashMap<String, Integer>>();

      System.out.println(KRAKEN_LEVELS[x]);
      for (String s : topDir.list()) {
        if (s.startsWith("standardReport_for_Sample_") && s.indexOf("16S") == -1) {
          // System.out.println("NON 16S " + s);
          addToMap(new File(topDir.getAbsolutePath() + File.separator + s), map, KRAKEN_LEVELS[x]);
        }
      }

      File outFile =
          new File(
              ConfigReader.getVanderbiltDir()
                  + File.separator
                  + "spreadsheets"
                  + File.separator
                  + "kraken_"
                  + RDP_LEVELS[x]
                  + "_taxaAsColumns.txt");
      PivotOTUs.writeResults(map, outFile.getAbsolutePath());

      OtuWrapper wrapper = new OtuWrapper(outFile);
      wrapper.writeNormalizedLoggedDataToFile(
          ConfigReader.getVanderbiltDir()
              + File.separator
              + "spreadsheets"
              + File.separator
              + "kraken_"
              + RDP_LEVELS[x]
              + "_taxaAsColumnsLogNorm.txt");
    }

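    // second pass: repeat the pivot for the 16S reports only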
    for (int x = 0; x < KRAKEN_LEVELS.length; x++) {
      HashMap<String, HashMap<String, Integer>> map =
          new HashMap<String, HashMap<String, Integer>>();

      System.out.println(KRAKEN_LEVELS[x]);
      for (String s : topDir.list()) {
        // System.out.println(s);
        if (s.startsWith("standardReport_for_") && s.indexOf("16S") != -1) {
          // System.out.println("For 16S " + s);
          addToMap(new File(topDir.getAbsolutePath() + File.separator + s), map, KRAKEN_LEVELS[x]);
        }
      }

      File outFile =
          new File(
              ConfigReader.getVanderbiltDir()
                  + File.separator
                  + "spreadsheets"
                  + File.separator
                  + "kraken_"
                  + RDP_LEVELS[x]
                  + "_taxaAsColumnsFor16S.txt");
      PivotOTUs.writeResults(map, outFile.getAbsolutePath());

      OtuWrapper wrapper = new OtuWrapper(outFile);
      wrapper.writeNormalizedLoggedDataToFile(
          ConfigReader.getVanderbiltDir()
              + File.separator
              + "spreadsheets"
              + File.separator
              + "kraken_"
              + RDP_LEVELS[x]
              + "_taxaAsColumnsLogNormFor16S.txt");
    }
  }