Example #1
0
 // Broadcasts an alert packet to every user that is currently online.
 public void sendAlertToAllUsers(String Message) {
   try {
     // Build the legacy alert packet: an integer flag followed by the message text.
     ServerMessage Alert = new ServerMessage(Outconming.OldAlert);
     Alert.writeInt(1);
     Alert.writeUTF(Message);
     // getUserFromId maps user ids to User objects; the raw Iterator forces the casts below.
     Iterator reader = getUserFromId.entrySet().iterator();
     while (reader.hasNext()) {
       User nUser = (User) (((Map.Entry) reader.next()).getValue());
       if (nUser.isOnline) Alert.Send(nUser.Session);
     }
   } catch (Exception e) {
     e.printStackTrace();
   }
 }
Example #2
0
 // Case-insensitive substring search over the user names in the in-memory registry.
 public static List<User> searchHabbo(String SSO) {
   List<User> mUser = new ArrayList<User>();
   try {
     Iterator reader = getUserFromId.entrySet().iterator();
     while (reader.hasNext()) {
       User nUser = (User) (((Map.Entry) reader.next()).getValue());
       if (nUser.userName.toUpperCase().contains(SSO.toUpperCase())) {
         mUser.add(nUser);
       }
     }
   } catch (Exception ex) {
     // On any error, return whatever matches were collected so far.
     return mUser;
   }
   return mUser;
 }
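A minimal sketch of the same lookup pattern with a generically typed registry. It assumes a hypothetical usersById map declared as Map<Integer, User> (the getUserFromId map used in Examples #1-#5 is raw, which is why every example needs the (User) cast); the nested User class here is only a stand-in for the real domain class.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class UserRegistrySketch {
  // Stand-in for the real User class from the examples above.
  static class User {
    String userName;
  }

  // Hypothetical typed registry; with generics no cast is needed when iterating.
  private static final Map<Integer, User> usersById = new ConcurrentHashMap<>();

  public static User findByName(String name) {
    for (User user : usersById.values()) {
      if (user.userName != null && user.userName.equalsIgnoreCase(name)) {
        return user;
      }
    }
    return null; // not found
  }
}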
Example #3
0
  // For every node present in both distance maps, tracks the smallest combined
  // distSource + distTarget value. Returns -1 (and leaves minCommonNode at NULL_NODE)
  // when the two maps share no node.
  public int computeMinDist() {
    int min = Integer.MAX_VALUE;
    this.minCommonNode = NULL_NODE;
    for (Entry<Long, Integer> n : distSource.entrySet()) {
      long node = n.getKey();
      // containsKey, not contains: we are testing for the node id as a key.
      if (distTarget.containsKey(node)) {
        int sumDist = n.getValue() + distTarget.get(node);
        if (sumDist < min) {
          min = sumDist;
          this.minCommonNode = node;
        }
      }
    }

    if (min == Integer.MAX_VALUE) min = -1;
    return min;
  }
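A small, self-contained sketch of the check computeMinDist performs, using two hypothetical HashMap<Long, Integer> instances in place of distSource and distTarget (the actual field types and the NULL_NODE constant are not shown in the example above):

import java.util.HashMap;
import java.util.Map;

public class MeetingNodeDemo {
  public static void main(String[] args) {
    Map<Long, Integer> distSource = new HashMap<>();
    Map<Long, Integer> distTarget = new HashMap<>();
    distSource.put(1L, 3);
    distSource.put(2L, 5);
    distTarget.put(2L, 4); // node 2 appears in both maps: combined distance 5 + 4 = 9
    distTarget.put(7L, 1);

    int min = Integer.MAX_VALUE;
    long minCommonNode = -1L; // stand-in for NULL_NODE
    for (Map.Entry<Long, Integer> e : distSource.entrySet()) {
      Integer fromTarget = distTarget.get(e.getKey()); // null when only one map has the node
      if (fromTarget != null && e.getValue() + fromTarget < min) {
        min = e.getValue() + fromTarget;
        minCommonNode = e.getKey();
      }
    }
    // Prints: common node = 2, combined distance = 9
    System.out.println("common node = " + minCommonNode + ", combined distance = " + min);
  }
}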
Example #4
0
 // Returns the user with exactly this user name from the in-memory registry, or null.
 public static User getHabboFromName(String SSO) {
   User mResult = null;
   try {
     Iterator reader = getUserFromId.entrySet().iterator();
     while (reader.hasNext()) {
       User nUser = (User) (((Map.Entry) reader.next()).getValue());
       if (nUser.userName.equals(SSO)) {
         return nUser;
       }
     }
   } catch (Exception ex) {
     ex.printStackTrace();
     return null;
   }
   return mResult;
 }
Example #5
0
  // Resolves a user by SSO ticket: first from the in-memory registry, and otherwise from
  // the database, caching any freshly loaded User in getUserFromId.
  public static User getHabboFromSSO(String SSO) {
    User mResult = null;
    try {
      Iterator reader = getUserFromId.entrySet().iterator();

      while (reader.hasNext()) {
        User nUser = (User) (((Map.Entry) reader.next()).getValue());
        if (nUser.SSO.equals(SSO)) {
          return nUser;
        }
      }
      // NOTE: the SSO value is concatenated directly into the SQL string, which leaves this
      // query open to SQL injection; a parameterized query would be safer (see the sketch
      // after this example).
      ResultSet mResult2 =
          Legion.getDetabase()
              .executeQuery("SELECT * FROM `users` WHERE `ssoTicket`='" + SSO + "'");
      if (mResult2.next()) {
        User h = new User();
        h.Id = mResult2.getInt("id");
        h.userName = mResult2.getString("username");
        h.realName = mResult2.getString("realname");
        h.Look = mResult2.getString("look");
        h.SSO = mResult2.getString("ssoTicket");
        h.Email = mResult2.getString("email");
        h.Mission = mResult2.getString("mission");
        h.Coin = mResult2.getInt("coins");
        h.OtherCoin = mResult2.getInt("othercoins");
        h.Score = mResult2.getInt("score");
        h.Gender = mResult2.getString("Gender");
        h.Respects = mResult2.getInt("Respects");
        h.maxUserRespects = mResult2.getInt("Max_Users_Respects");
        h.maxPetsRespects = mResult2.getInt("Max_Pets_Respects");
        h.Rank = mResult2.getInt("rank");
        h.bannTime = mResult2.getInt("ban_time");
        h.banCount = mResult2.getInt("ban_count");
        h.habboClubInit = mResult2.getString("Habbo_Club_Init");
        h.habboClubExpire = mResult2.getString("Habbo_Club_Expire");
        // Habbo Club is considered active while the stored expiry date lies in the future.
        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
        Date date = new Date();
        Date mDate = dateFormat.parse(h.habboClubExpire);
        h.Habbo_Club = (date.getTime() < mDate.getTime());
        h.isBanned = mResult2.getBoolean("ban");
        h.isKuick = mResult2.getBoolean("kuick");
        h.registredTime = mResult2.getString("registred_time");
        h.lastLogin = mResult2.getString("last_login");
        h.isOnline = false;
        h.isInRoom = false;
        h.isWriting = false;
        h.isWalking = false;
        h.x = 0;
        h.y = 0;
        h.z = 0.0;
        h.rot = 0;
        h.rothead = 0;
        // Cache (or refresh) the loaded user in the registry.
        if (getUserFromId.containsKey(h.Id)) {
          getUserFromId.remove(h.Id);
        }
        getUserFromId.put(h.Id, h);
        mResult = h;
      }
    } catch (Exception ex) {
      ex.printStackTrace();
      return null;
    }
    return mResult;
  }
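The string-concatenated query above could be replaced with a parameterized one. A minimal sketch, assuming a plain java.sql.Connection is available (the Legion.getDetabase() wrapper's API is not shown here, so it is not called directly); findUserIdBySso and its return convention are illustrative only:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class SsoLookupSketch {
  // Returns the user id for the given SSO ticket, or null when no row matches.
  public static Integer findUserIdBySso(Connection conn, String sso) throws SQLException {
    String sql = "SELECT `id` FROM `users` WHERE `ssoTicket` = ?";
    try (PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setString(1, sso); // the driver escapes the value; no string concatenation
      try (ResultSet rs = ps.executeQuery()) {
        return rs.next() ? rs.getInt("id") : null;
      }
    }
  }
}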
  // Compares the overlap and the Z-score direction of effects between an eQTL file and an
  // meQTL file, using an eQTM file to map methylation probes onto expression probes/genes.
  public final void compareOverlapAndZScoreDirectionTwoEQTLFiles(
      String eQTL,
      String meQTL,
      String eQTMFile,
      String outputFile,
      boolean matchOnGeneName,
      double fdrCutt,
      boolean matchSnpOnPos,
      boolean splitGeneNames,
      boolean flipUsingEQTM,
      boolean topeffect)
      throws IOException, Exception {
    System.out.println("Performing comparison of eQTLs and meQTLs");
    // FDR threshold used to filter both files; when set to -1, no FDR filtering is applied.
    double filterOnFDR = fdrCutt;

    // eQTLs to exclude from the analysis: if requested, put the full eQTL identifier in this
    // set for each eQTL to skip. Does not work in combination with matching on chr and pos.
    HashSet<String> hashExcludeEQTLs = new HashSet<String>();
    // Optionally confine the analysis to a subset of probes by putting the probe names in
    // this set.
    HashSet<String> hashConfineAnalysisToSubsetOfProbes = new HashSet<String>();
    // Optionally confine the analysis to eQTLs whose SNP successfully passed QC (otherwise
    // unfair comparisons are sometimes made) by putting the SNP names in this set.
    HashSet<String> hashTestedSNPsThatPassedQC = null;

    // Load the eQTM File
    QTLTextFile eQTLsTextFile = new QTLTextFile(eQTMFile, QTLTextFile.R);

    HashMap<String, ArrayList<EQTL>> eQtmInfo = new HashMap<String, ArrayList<EQTL>>();

    for (Iterator<EQTL> eQtlIt = eQTLsTextFile.getEQtlIterator(); eQtlIt.hasNext(); ) {
      EQTL eQtm = eQtlIt.next();
      String eQtmKey = eQtm.getRsName();

      // Orient every eQTM to the same assessed allele ("C") so that Z-scores and
      // correlations from different records point in a comparable direction.
      if (!eQtm.getAlleleAssessed().equals("C")) {
        eQtm.setAlleleAssessed("C");
        eQtm.setZscore(eQtm.getZscore() * -1);

        Double[] zscores = eQtm.getDatasetZScores();
        Double[] correlation = eQtm.getCorrelations();
        for (int i = 0; i < eQtm.getDatasets().length; ++i) {
          zscores[i] *= -1;
          correlation[i] *= -1;
        }
        eQtm.setDatasetZScores(zscores);
        eQtm.setCorrelations(correlation);
      }

      ArrayList<EQTL> posEqtls = eQtmInfo.get(eQtmKey);

      if (posEqtls == null) {
        posEqtls = new ArrayList<EQTL>(1);
        posEqtls.add(eQtm);
        eQtmInfo.put(eQtmKey, posEqtls);
      } else if (!topeffect) {
        eQtmInfo.get(eQtmKey).add(eQtm);
      }
    }

    System.out.println("eQTMs read in: " + eQtmInfo.size());

    // Now load the eQTLs for file 1:
    THashMap<String, String[]> hashEQTLs = new THashMap<String, String[]>();
    THashSet<String> hashUniqueProbes = new THashSet<String>();
    THashSet<String> hashUniqueGenes = new THashSet<String>();

    TextFile in = new TextFile(eQTL, TextFile.R);
    in.readLine();
    String[] data = in.readLineElemsReturnReference(SPLIT_ON_TAB);

    if (data.length < 5) {
      throw new IllegalStateException(
          "QTL File does not have enough columns. Detected columns: "
              + data.length
              + " in file "
              + in.getFileName());
    }
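    // Column layout assumed by the indices used below (inferred from how this method itself
    // reads the QTL text format; not documented in the original source):
    //   data[1]  SNP identifier                data[2]/data[3]  SNP chromosome / position
    //   data[4]  probe identifier              data[8]/data[9]  alleles / assessed allele
    //   data[10] overall Z-score               data[16]         gene (HUGO) name
    //   data[17] per-dataset correlations      data[18]         FDR value used for filtering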

    while (data != null) {
      if (filterOnFDR == -1 || Double.parseDouble(data[18]) <= filterOnFDR) {
        if (hashConfineAnalysisToSubsetOfProbes.isEmpty()
            || hashConfineAnalysisToSubsetOfProbes.contains(data[4])) {
          if (matchOnGeneName) {
            if (data[16].length() > 1) {

              if (splitGeneNames) {
                for (String gene : SEMI_COLON_PATTERN.split(data[16])) {

                  hashEQTLs.put(
                      (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + gene, data);
                  hashUniqueProbes.add(data[4]);
                  hashUniqueGenes.add(gene);
                }
              } else {

                if (!hashExcludeEQTLs.contains(data[1] + "\t" + data[16])) {
                  hashEQTLs.put(
                      (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + data[16], data);
                  hashUniqueProbes.add(data[4]);
                  hashUniqueGenes.add(data[16]);
                  // log.write("Added eQTL from original file " + (matchSnpOnPos ? data[2] + ":" +
                  // data[3] : data[1]) + "\t" + data[16]);
                }
              }
            }
          } else {
            if (!hashExcludeEQTLs.contains(data[1] + "\t" + data[4])) {
              hashEQTLs.put(
                  (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + data[4], data);
              hashUniqueProbes.add(data[4]);
              hashUniqueGenes.add(data[16]);
              //	log.write("Added eQTL from original file " + (matchSnpOnPos ? data[2] + ":" +
              // data[3] : data[1]) + "\t" + data[4]);
            }
          }
        }
      }
      // Advance to the next line whether or not it passed the FDR filter; otherwise a line
      // failing the filter would never be consumed and this loop would not terminate.
      data = in.readLineElemsReturnReference(SPLIT_ON_TAB);
    }
    in.close();

    int nrUniqueProbes = hashUniqueProbes.size();
    int nrUniqueGenes = hashUniqueGenes.size();
    hashUniqueProbes = null;
    hashUniqueGenes = null;

    // Initialize Graphics2D for the Z-Score allelic direction comparison:
    //        int width = 1000;
    //        int height = 1000;
    //        int margin = 100;
    //        int x0 = margin;
    //        int x1 = width - margin;
    //        int y0 = margin;
    //        int y1 = height - margin;

    ZScorePlot zs = new ZScorePlot();
    String zsOutFileName = outputFile + "-ZScoreComparison.pdf";
    zs.init(2, new String[] {"eQTLs", "meQTLs"}, true, zsOutFileName);

    // Variables holding various statistics:
    int nreQTLsIdenticalDirection = 0;
    int nreQTLsOppositeDirection = 0;
    HashMap<String, Integer> hashEQTLNrTimesAssessed = new HashMap<String, Integer>();

    THashSet<String> hashEQTLs2 = new THashSet<String>();
    THashSet<String> hashUniqueProbes2 = new THashSet<String>();
    THashSet<String> hashUniqueGenes2 = new THashSet<String>();
    THashSet<String> hashUniqueProbesOverlap = new THashSet<String>();
    THashSet<String> hashUniqueGenesOverlap = new THashSet<String>();

    int counterFile2 = 0;
    int overlap = 0;
    ArrayDoubleList vecX = new ArrayDoubleList();
    ArrayDoubleList vecY = new ArrayDoubleList();

    // Vector holding all opposite allelic effects:
    //        LinkedHashSet<String> vecOppositeEQTLs = new LinkedHashSet<String>();

    // Now process file 2:
    in = new TextFile(meQTL, TextFile.R);
    in.readLine();

    int skippedDueToMapping = 0;
    data = null;
    TextFile identicalOut =
        new TextFile(outputFile + "-eQTLsWithIdenticalDirecton.txt.gz", TextFile.W);
    TextFile disconcordantOut = new TextFile(outputFile + "-OppositeEQTLs.txt", TextFile.W);
    TextFile log = new TextFile(outputFile + "-eQTL-meQTL-ComparisonLog.txt", TextFile.W);
    TextFile log2 = new TextFile(outputFile + "-eQTM-missingnessLog.txt", TextFile.W);

    THashSet<String> identifiersUsed = new THashSet<String>();

    while ((data = in.readLineElemsReturnReference(SPLIT_ON_TAB)) != null) {

      if (filterOnFDR == -1 || Double.parseDouble(data[18]) <= filterOnFDR) {
        if (!eQtmInfo.containsKey(data[4])) {
          skippedDueToMapping++;
          log2.write(
              "meQTL probe not present In eQTM file:\t"
                  + data[4]
                  + ", effect statistics: \t"
                  + data[0]
                  + "\t"
                  + data[2]
                  + "\t"
                  + data[3]
                  + "\t"
                  + data[16]
                  + "\n");
          continue;
        }

        String orgDataFour = data[4];

        for (int i = 0; i < eQtmInfo.get(orgDataFour).size(); ++i) {
          if (topeffect && i > 0) {
            break;
          }
          data[16] = eQtmInfo.get(orgDataFour).get(i).getProbeHUGO();
          data[4] = eQtmInfo.get(orgDataFour).get(i).getProbe();

          if (flipUsingEQTM) {
            Double zScoreQTM = eQtmInfo.get(orgDataFour).get(i).getZscore();
            if (zScoreQTM < 0) {
              data[10] = String.valueOf(Double.parseDouble(data[10]) * -1);
            }
          }

          if (hashConfineAnalysisToSubsetOfProbes.isEmpty()
              || hashConfineAnalysisToSubsetOfProbes.contains(data[4])) {
            if (matchOnGeneName) {
              if (!hashExcludeEQTLs.contains(data[1] + "\t" + data[16])) {
                if (data[16].length() > 1) {

                  if (splitGeneNames) {
                    for (String gene : SEMI_COLON_PATTERN.split(data[16])) {

                      hashUniqueProbes2.add(data[4]);
                      hashUniqueGenes2.add(gene);
                      if (!hashEQTLs2.contains(
                          (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + gene)) {
                        hashEQTLs2.add(
                            (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + gene);
                        counterFile2++;
                      }
                    }
                  } else {

                    hashUniqueProbes2.add(data[4]);
                    hashUniqueGenes2.add(data[16]);
                    if (!hashEQTLs2.contains(
                        (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + data[16])) {
                      hashEQTLs2.add(
                          (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + data[16]);
                      counterFile2++;
                    }
                  }
                }
              }
            } else {
              if (!hashExcludeEQTLs.contains(data[1] + "\t" + data[4])) {
                // hashEQTLs2.put(data[1] + "\t" + data[4], str);
                hashUniqueProbes2.add(data[4]);
                hashUniqueGenes2.add(data[16]);
                counterFile2++;
              }
            }
          }
          String[] QTL = null;
          String identifier = null;
          if (matchOnGeneName) {

            if (data.length > 16 && data[16].length() > 1) {
              if (splitGeneNames) {
                // NB Plotting and processing of all QTLs here is not okay!
                for (String gene : SEMI_COLON_PATTERN.split(data[16])) {
                  if (!hashExcludeEQTLs.contains(data[1] + "\t" + gene)) {
                    identifier = (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + gene;
                    if (hashEQTLs.containsKey(identifier)) {
                      QTL = hashEQTLs.get(identifier);
                    }
                  }
                }
              } else {
                if (!hashExcludeEQTLs.contains(data[1] + "\t" + data[16])) {
                  identifier =
                      (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + data[16];
                  if (hashEQTLs.containsKey(identifier)) {
                    QTL = hashEQTLs.get(identifier);
                  }
                }
              }
            }
          } else {
            if (!hashExcludeEQTLs.contains(data[1] + "\t" + data[4])) {
              identifier = (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + data[4];
              if (hashEQTLs.containsKey(identifier)) {
                QTL = hashEQTLs.get(identifier);
              }
            }
          }

          if (QTL == null) {

            // This eQTL is present in file 2 (the new file) but not in file 1 (the original file):
            // if (Double.parseDouble(data[0]); < 1E-4) {
            if (hashTestedSNPsThatPassedQC == null
                || hashTestedSNPsThatPassedQC.contains(data[1])) {
              log.write(
                  "eQTL Present In New file But Not In Original File:\t"
                      + identifier
                      + "\t"
                      + data[0]
                      + "\t"
                      + data[2]
                      + "\t"
                      + data[3]
                      + "\t"
                      + data[16]
                      + "\n");
            }
            // }
            double zScore2 = Double.parseDouble(data[10]);
            //                        int posX = 500 + (int) 0;
            //                        int posY = 500 - (int) Math.round(zScore2 * 10);
            zs.draw(null, zScore2, 0, 1);

          } else {
            identifiersUsed.add(identifier);
            String[] eQtlData = QTL;
            boolean identicalProbe = true;
            String probe = data[4];
            String probeFound = eQtlData[4];
            if (!probe.equals(probeFound)) {
              identicalProbe = false;
            }

            hashUniqueProbesOverlap.add(data[4]);
            hashUniqueGenesOverlap.add(data[16]);
            if (!hashEQTLNrTimesAssessed.containsKey(identifier)) {
              hashEQTLNrTimesAssessed.put(identifier, 1);
            } else {
              hashEQTLNrTimesAssessed.put(identifier, 1 + hashEQTLNrTimesAssessed.get(identifier));
            }
            String alleles = eQtlData[8];
            String alleleAssessed = eQtlData[9];

            String correlations[] = (eQtlData[17]).split(";");
            double correlation = 0;
            int numCorr1 = 0;
            for (int c = 0; c < correlations.length; c++) {
              try {
                if (!correlations[c].equals("-")) {
                  correlation += Double.parseDouble(correlations[c]);
                  numCorr1++;
                }
              } catch (NumberFormatException e) {
                // Skip correlation entries that are not numeric.
              }
            }

            correlation /= (double) numCorr1;
            //                       if(numCorr1 == 0){
            //                           System.out.println("Warning: no correlations defined for
            // eqtl file 1");
            //                       }
            double zScore = Double.parseDouble(eQtlData[10]);
            //                        double pValue = Double.parseDouble(eQtlData[0]);
            String alleles2 = data[8];
            String alleleAssessed2 = data[9];
            double zScore2 = Double.parseDouble(data[10]);

            //                        double pValue2 = Double.parseDouble(data[0]);
            String correlations2[] = data[17].split(";");
            double correlation2 = 0;

            boolean alleleflipped = false;
            if (!alleleAssessed.equals(data[9])) {
              if (data[9].equals(eQtlData[8].split("/")[0])) {
                alleleflipped = true;
              } else {
                //                               System.out.println("WTF BBQ!");
              }
            }

            int numCorr2 = 0;
            for (int c = 0; c < correlations2.length; c++) {
              try {
                if (!correlations2[c].equals("-")) {

                  correlation2 += (Double.parseDouble(correlations2[c]));

                  numCorr2++;
                }
              } catch (NumberFormatException e) {
                // Skip correlation entries that are not numeric.
              }
            }
            //                       if(numCorr2 == 0){
            //                           System.out.println("Warning: no correlations defined for
            // eqtl file 2");
            //                       }
            correlation2 /= (double) numCorr2;
            if (alleleflipped) {
              correlation2 = -correlation2;
            }
            boolean sameDirection = false;
            int nrIdenticalAlleles = 0;
            if (alleles.length() > 2 && alleles2.length() > 2) {
              for (int a = 0; a < 3; a++) {
                for (int b = 0; b < 3; b++) {
                  if (a != 1 && b != 1) {
                    if (alleles.getBytes()[a] == alleles2.getBytes()[b]) {
                      nrIdenticalAlleles++;
                    }
                  }
                }
              }
            }

            if (nrIdenticalAlleles == 0) {
              alleles2 =
                  (char) BaseAnnot.getComplement((byte) alleles2.charAt(0))
                      + "/"
                      + (char) BaseAnnot.getComplement((byte) alleles2.charAt(2));
              alleleAssessed2 = BaseAnnot.getComplement(alleleAssessed2);
              if (alleles.length() > 2 && alleles2.length() > 2) {
                for (int a = 0; a < 3; a++) {
                  for (int b = 0; b < 3; b++) {
                    if (a != 1 && b != 1) {
                      if (alleles.getBytes()[a] == alleles2.getBytes()[b]) {
                        nrIdenticalAlleles++;
                      }
                    }
                  }
                }
              }
            }

            if (nrIdenticalAlleles != 2) {
              log.write(
                  "Error! SNPs have incompatible alleles!!:\t"
                      + alleles
                      + "\t"
                      + alleles2
                      + "\t"
                      + identifier
                      + "\n");
            } else {
              overlap++;
              if (!alleleAssessed.equals(alleleAssessed2)) {
                zScore2 = -zScore2;
                //                           correlation2 = -correlation2;
                alleleAssessed2 = alleleAssessed;
              }

              // Recode alleles:
              // if contains T, but no A, take complement
              //                        if (alleles.contains("T") && !alleles.contains("A")) {
              //                            alleles = BaseAnnot.getComplement(alleles);
              //                            alleleAssessed =
              // BaseAnnot.getComplement(alleleAssessed);
              //                            alleleAssessed2 =
              // BaseAnnot.getComplement(alleleAssessed2);
              //                        }
              if (zScore2 * zScore > 0) {
                sameDirection = true;
              }

              //                       if(correlation != correlation2 && (numCorr1 > 0 && numCorr2 >
              // 0)){
              //                           if(Math.abs(correlation - correlation2) > 0.00001){
              //                               System.out.println("Correlations are different:
              // "+lineno+"\t"+correlation +"\t"+correlation2+"\t"+str);
              //                           }
              //
              //                       }
              zs.draw(zScore, zScore2, 0, 1);
              if (!sameDirection) {
                nreQTLsOppositeDirection++;

                if (matchOnGeneName) {
                  disconcordantOut.append(
                      data[1]
                          + '\t'
                          + data[16]
                          + '\t'
                          + alleles
                          + '\t'
                          + alleleAssessed
                          + '\t'
                          + zScore
                          + '\t'
                          + alleles2
                          + '\t'
                          + alleleAssessed2
                          + '\t'
                          + zScore2);

                } else {
                  disconcordantOut.append(
                      data[1]
                          + '\t'
                          + data[4]
                          + '\t'
                          + alleles
                          + '\t'
                          + alleleAssessed
                          + '\t'
                          + zScore
                          + '\t'
                          + alleles2
                          + '\t'
                          + alleleAssessed2
                          + '\t'
                          + zScore2);
                }

                //                            int posX = 500 + (int) Math.round(zScore * 10);
                //                            int posY = 500 - (int) Math.round(zScore2 * 10);
                vecX.add(zScore);
                vecY.add(zScore2);

              } else {
                // write to output
                identicalOut.writeln(
                    identifier
                        + '\t'
                        + alleles
                        + '\t'
                        + alleleAssessed
                        + '\t'
                        + zScore
                        + '\t'
                        + alleles2
                        + '\t'
                        + alleleAssessed2
                        + '\t'
                        + zScore2);
                nreQTLsIdenticalDirection++;
                if (alleles.length() > 2
                    && !alleles.equals("A/T")
                    && !alleles.equals("T/A")
                    && !alleles.equals("C/G")
                    && !alleles.equals("G/C")) {
                  //                                int posX = 500 + (int) Math.round(zScore * 10);
                  //                                int posY = 500 - (int) Math.round(zScore2 * 10);
                  vecX.add(zScore);
                  vecY.add(zScore2);
                }
              }
            }
          }
        }
      }
    }
    identicalOut.close();
    disconcordantOut.close();
    in.close();
    log2.close();

    log.write(
        "\n/// Writing missing QTLs observed in original file but not in the new file ////\n\n");
    for (Entry<String, String[]> QTL : hashEQTLs.entrySet()) {
      if (!identifiersUsed.contains(QTL.getKey())) {
        // This eQTL is present in file 1 (the original file) but not in file 2 (the new file):

        // if (Double.parseDouble(QTL.getValue()[0]) < 1E-4) {
        // Note: data is null once the read loop above has finished, so the SNP name must come
        // from the stored QTL record rather than from data[1].
        if (hashTestedSNPsThatPassedQC == null
            || hashTestedSNPsThatPassedQC.contains(QTL.getValue()[1])) {
          log.write(
              "eQTL Present In Original file But Not In New File:\t"
                  + QTL.getKey()
                  + "\t"
                  + QTL.getValue()[0]
                  + "\t"
                  + QTL.getValue()[2]
                  + "\t"
                  + QTL.getValue()[3]
                  + "\t"
                  + QTL.getValue()[16]
                  + "\n");
        }
        // }
        double zScore = Double.parseDouble(QTL.getValue()[10]);
        //                int posX = 500 + (int) 0;
        //                int posY = 500 - (int) Math.round(zScore * 10);
        zs.draw(zScore, null, 0, 1);
      }
    }

    log.close();
    zs.write(zsOutFileName);

    double[] valsX = vecX.toArray();
    double[] valsY = vecY.toArray();

    if (valsX.length > 2) {
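      // Two-sided significance test for the Pearson correlation of the overlapping Z-scores:
      // t = r * sqrt((n - 2) / (1 - r^2)) with n = valsX.length is referred to a Student-t
      // distribution with n - 2 degrees of freedom, and the one-sided tail P(T <= -|t|) is
      // doubled to obtain the reported p-value.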
      double correlation = JSci.maths.ArrayMath.correlation(valsX, valsY);
      double r2 = correlation * correlation;

      cern.jet.random.tdouble.engine.DoubleRandomEngine randomEngine =
          new cern.jet.random.tdouble.engine.DRand();
      cern.jet.random.tdouble.StudentT tDistColt =
          new cern.jet.random.tdouble.StudentT(valsX.length - 2, randomEngine);
      double pValuePearson = 1;
      double tValue = correlation / (Math.sqrt((1 - r2) / (double) (valsX.length - 2)));
      if (tValue < 0) {
        pValuePearson = tDistColt.cdf(tValue);
      } else {
        pValuePearson = tDistColt.cdf(-tValue);
      }
      pValuePearson *= 2;
      System.out.println(
          "\nCorrelation between the Z-Scores of the overlapping set of eQTLs:\t"
              + correlation
              + "\tP-Value:\t"
              + pValuePearson);
    }

    TextFile outSummary = new TextFile(outputFile + "-Summary.txt", TextFile.W);

    System.out.println("");
    System.out.println(
        "Nr of eQTLs:\t"
            + hashEQTLs.size()
            + "\tin file:\t"
            + eQTL
            + "\tNrUniqueProbes:\t"
            + nrUniqueProbes
            + "\tNrUniqueGenes:\t"
            + nrUniqueGenes);
    outSummary.writeln(
        "Nr of eQTLs:\t"
            + hashEQTLs.size()
            + "\tin file:\t"
            + eQTL
            + "\tNrUniqueProbes:\t"
            + nrUniqueProbes
            + "\tNrUniqueGenes:\t"
            + nrUniqueGenes);

    System.out.println(
        "Nr of meQTLs:\t"
            + counterFile2
            + "\tin file:\t"
            + meQTL
            + "\tNrUniqueProbes:\t"
            + hashUniqueProbes2.size()
            + "\tNrUniqueGenes:\t"
            + hashUniqueGenes2.size()
            + " *With eQTM mapping.");
    outSummary.writeln(
        "Nr of meQTLs:\t"
            + counterFile2
            + "\tin file:\t"
            + meQTL
            + "\tNrUniqueProbes:\t"
            + hashUniqueProbes2.size()
            + "\tNrUniqueGenes:\t"
            + hashUniqueGenes2.size()
            + " *With eQTM mapping.");

    System.out.println("Skipped over meQTLs:\t" + skippedDueToMapping);
    outSummary.writeln("Skipped over meQTLs:\t" + skippedDueToMapping);

    System.out.println(
        "Overlap:\t"
            + overlap
            + "\tNrUniqueProbesOverlap:\t"
            + hashUniqueProbesOverlap.size()
            + "\tNrUniqueGenesOverlap:\t"
            + hashUniqueGenesOverlap.size());
    outSummary.writeln(
        "Overlap:\t"
            + overlap
            + "\tNrUniqueProbesOverlap:\t"
            + hashUniqueProbesOverlap.size()
            + "\tNrUniqueGenesOverlap:\t"
            + hashUniqueGenesOverlap.size());

    System.out.println("");
    outSummary.writeln();

    System.out.println("Nr eQTLs with identical direction:\t" + nreQTLsIdenticalDirection);
    outSummary.writeln("Nr eQTLs with identical direction:\t" + nreQTLsIdenticalDirection);

    double proportionOppositeDirection =
        100d
            * (double) nreQTLsOppositeDirection
            / (double) (nreQTLsOppositeDirection + nreQTLsIdenticalDirection);
    String proportionOppositeDirectionString =
        (new java.text.DecimalFormat(
                "0.00;-0.00", new java.text.DecimalFormatSymbols(java.util.Locale.US)))
            .format(proportionOppositeDirection);

    System.out.println(
        "Nr eQTLs with opposite direction:\t"
            + nreQTLsOppositeDirection
            + "\t("
            + proportionOppositeDirectionString
            + "%)");
    outSummary.writeln(
        "Nr eQTLs with opposite direction:\t"
            + nreQTLsOppositeDirection
            + "\t("
            + proportionOppositeDirectionString
            + "%)");

    outSummary.close();

    nrShared = hashUniqueProbesOverlap.size();
    nrOpposite = nreQTLsOppositeDirection;
  }