Example #1
 @Override
 public void duplicate(Solver solver, THashMap<Object, Object> identitymap) {
   if (!identitymap.containsKey(this)) {
     this.var.duplicate(solver, identitymap);
     OffsetView clone = new OffsetView((IntVar) identitymap.get(this.var), this.cste, solver);
     identitymap.put(this, clone);
   }
 }
Example #2
  @Override
  public String serialize() {

    StringBuffer res = new StringBuffer();

    for (String s : branches.keySet())
      res.append(this.item_id + "\t" + s + "\t" + branches.get(s) + "\n");

    // strip the trailing "\n"
    return res.substring(0, res.length() - 1);
  }
Example #3
  @Override
  public void duplicate(Solver solver, THashMap<Object, Object> identitymap) {
    if (!identitymap.containsKey(this)) {
      this.vars[0].duplicate(solver, identitymap);
      IntVar X = (IntVar) identitymap.get(this.vars[0]);
      this.vars[1].duplicate(solver, identitymap);
      IntVar Y = (IntVar) identitymap.get(this.vars[1]);

      identitymap.put(this, new PropEqualX_YC(new IntVar[] {X, Y}, this.cste));
    }
  }
Example #4
 @Override
 public void duplicate(Solver solver, THashMap<Object, Object> identitymap) {
   if (!identitymap.containsKey(this)) {
     int size = vars.length;
     RealVar[] rvars = new RealVar[size];
     for (int i = 0; i < size; i++) {
       vars[i].duplicate(solver, identitymap);
       rvars[i] = (RealVar) identitymap.get(vars[i]);
     }
     identitymap.put(this, new RealPropagator(functions, rvars, option));
   }
 }
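The duplicate(Solver, THashMap) implementations throughout these examples all follow the same identity-map pattern: skip objects that already have a clone, duplicate the variables they reference first, fetch those clones from the map, and finally register the new clone under the original object as key. The following is a minimal, library-independent sketch of that pattern; the Node class and its fields are hypothetical and are not part of the Choco examples above.

import java.util.IdentityHashMap;
import java.util.Map;

// Minimal, library-independent sketch of the identity-map duplication pattern: duplicate
// referenced objects first, look their clones up in the map, then register the clone of the
// current object under the original as key, so shared objects are cloned exactly once.
// The Node class is hypothetical.
class Node {
  final String name;
  final Node child; // may be shared between several parents

  Node(String name, Node child) {
    this.name = name;
    this.child = child;
  }

  void duplicate(Map<Object, Object> identitymap) {
    if (!identitymap.containsKey(this)) {
      Node childClone = null;
      if (child != null) {
        child.duplicate(identitymap);               // make sure the child has a clone
        childClone = (Node) identitymap.get(child); // fetch that clone
      }
      identitymap.put(this, new Node(name, childClone));
    }
  }

  public static void main(String[] args) {
    Node shared = new Node("shared", null);
    Node a = new Node("a", shared);
    Node b = new Node("b", shared);
    Map<Object, Object> identitymap = new IdentityHashMap<Object, Object>();
    a.duplicate(identitymap);
    b.duplicate(identitymap);
    // the shared child was cloned only once:
    System.out.println(((Node) identitymap.get(a)).child == ((Node) identitymap.get(b)).child);
  }
}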
Example #5
  public User(boolean mTrue) {
    try {
      ResultSet mResult = Legion.getDetabase().executeQuery("SELECT * FROM  `users`");

      while (mResult.next()) {
        User h = new User();
        h.Id = mResult.getInt("id");
        h.userName = mResult.getString("username");
        h.realName = mResult.getString("realname");
        h.Look = mResult.getString("look");
        h.SSO = mResult.getString("ssoTicket");
        h.Email = mResult.getString("email");
        h.Mission = mResult.getString("mission");
        h.Coin = mResult.getInt("coins");
        h.OtherCoin = mResult.getInt("othercoins");
        h.Score = mResult.getInt("score");
        h.Gender = mResult.getString("Gender");
        h.Respects = mResult.getInt("Respects");
        h.maxUserRespects = mResult.getInt("Max_Users_Respects");
        h.maxPetsRespects = mResult.getInt("Max_Pets_Respects");
        h.Rank = mResult.getInt("rank");
        h.bannTime = mResult.getInt("ban_time");
        h.banCount = mResult.getInt("ban_count");
        h.habboClubInit = mResult.getString("Habbo_Club_Init");
        h.habboClubExpire = mResult.getString("Habbo_Club_Expire");
        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
        Date date = new Date();
        Date mDate = dateFormat.parse(h.habboClubExpire);
        h.Habbo_Club = (date.getTime() < mDate.getTime());
        h.isBanned = mResult.getBoolean("ban");
        h.isKuick = mResult.getBoolean("kuick");
        h.registredTime = mResult.getString("registred_time");
        h.lastLogin = mResult.getString("last_login");
        h.isOnline = false;
        h.isInRoom = false;
        h.isWriting = false;
        h.isWalking = false;
        h.x = 0;
        h.y = 0;
        h.z = 0.0;
        h.rot = 0;
        h.rothead = 0;
        getUserFromId.put(h.Id, h);
      }
      System.out.println("Loaded ".concat(getUserFromId.size() + " ").concat("user/s."));
      System.gc();
    } catch (Exception ex) {
      ex.printStackTrace();
    }
  }
Example #6
  public THashMap<String, String> findCoordinates() {

    THashMap<String, String> airportGeoSet = new THashMap<String, String>();
    // extract all characteristics of certain variables

    System.out.println("Finding coordinates from Airports");

    StringTokenizer st;
    String airportCode, longitude, latitude;

    try {
      BufferedReader br = new BufferedReader(new FileReader(new File(dirURL_geo)));
      System.out.println("scan " + dirURL_geo);

      String line = br.readLine();
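      // Each line is expected to be comma-separated: airportCode,latitude,longitude.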

      // for(int i = numberOfLinesToReadTestMode; i > 0; --i){
      while (line != null) {
        st = new StringTokenizer(line, ",");

        // jump to airport code and removes "".
        airportCode = st.nextToken();
        if (debug) System.out.print("   " + airportCode);

        // jump to latitude and removes "".
        latitude = st.nextToken();
        if (debug) System.out.print("   " + latitude);

        // jump to longitude and removes "".
        longitude = st.nextToken();
        if (debug) System.out.println("   " + longitude + "\n");

        // put airportCode as key and lat,long as value in airportGeoSet, hashmap.
        airportGeoSet.put(airportCode, latitude + ";" + longitude);
        // if(debug) System.out.println(airportGeoSet);

        // System.out.println(airportCode+";"+airportGeoSet.get(airportCode));

        line = br.readLine();
      }
      br.close();
    } catch (FileNotFoundException e) {
      e.printStackTrace();
    } catch (IOException e) {
      e.printStackTrace();
    }

    return airportGeoSet;
  }
Example #7
  public int computeMinDist() {
    int min = Integer.MAX_VALUE;
    this.minCommonNode = NULL_NODE;
    for (Entry<Long, Integer> n : distSource.entrySet()) {
      long node = n.getKey();
      // distTarget is assumed to be a Map keyed by node id, hence containsKey
      if (distTarget.containsKey(node)) {
        int sumDist = n.getValue() + distTarget.get(node);
        if (sumDist < min) {
          min = sumDist;
          this.minCommonNode = node;
        }
      }
    }

    if (min == Integer.MAX_VALUE) min = -1;
    return min;
  }
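computeMinDist() picks the meeting node that minimises distSource(node) + distTarget(node), the classic merge step of a bidirectional shortest-path search. Below is a self-contained, JDK-only sketch of that selection over two plain HashMaps; the class name and the sample distances are made up for illustration.

import java.util.HashMap;
import java.util.Map;

// JDK-only sketch of the meeting-node selection in computeMinDist(): among the nodes reached
// from both the source and the target side, pick the one minimising the sum of both distances.
public class MeetingNodeDemo {
  static final long NULL_NODE = -1L;

  public static void main(String[] args) {
    Map<Long, Integer> distSource = new HashMap<Long, Integer>();
    Map<Long, Integer> distTarget = new HashMap<Long, Integer>();
    distSource.put(1L, 3);
    distSource.put(2L, 5);
    distTarget.put(2L, 4);
    distTarget.put(3L, 1);

    int min = Integer.MAX_VALUE;
    long minCommonNode = NULL_NODE;
    for (Map.Entry<Long, Integer> n : distSource.entrySet()) {
      long node = n.getKey();
      if (distTarget.containsKey(node)) {
        int sumDist = n.getValue() + distTarget.get(node);
        if (sumDist < min) {
          min = sumDist;
          minCommonNode = node;
        }
      }
    }
    System.out.println(minCommonNode + " -> " + (min == Integer.MAX_VALUE ? -1 : min)); // prints: 2 -> 9
  }
}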
Example #8
  public void setUpdate(boolean online) {
    for (Friend friend : friends.values()) {
      if (H4J.getNetwork().getSessionManager().getOnlineStatusById(friend.getId())) { // if online
        Session channel = H4J.getNetwork().getSessionManager().getSessionById(friend.getId());

        channel.respond(UpdateFriendStateMessageComposer.compose(online, this.user));
      }
    }
  }
Example #9
  public boolean friendshipExists(int id) {

    for (Friend friend : friends.values()) {
      if (friend.getId() == id) {
        return true;
      }
    }
    return false;
  }
Example #10
    @Override
    public void duplicate(Solver solver, THashMap<Object, Object> identitymap) {
      if (!identitymap.containsKey(this)) {
        int size = this.n;

        IntVar[] iVars = new IntVar[size];
        for (int i = 0; i < size; i++) {
          this.vars[i].duplicate(solver, identitymap);
          iVars[i] = (IntVar) identitymap.get(this.vars[i]);
        }
        RealVar[] rVars = new RealVar[size];
        for (int i = 0; i < size; i++) {
          this.vars[i + n].duplicate(solver, identitymap);
          rVars[i] = (RealVar) identitymap.get(this.vars[i + n]);
        }
        identitymap.put(this, new PropIntEqReal(iVars, rVars, epsilon));
      }
    }
Example #11
 @Override
 public D duplicate(Solver solver, THashMap<Object, Object> identitymap) {
   int size = this.scope.length;
   IntVar[] aVars = new IntVar[size];
   for (int i = 0; i < size; i++) {
     this.scope[i].duplicate(solver, identitymap);
     aVars[i] = (IntVar) identitymap.get(this.scope[i]);
   }
   return new AutoDiffDetection(aVars);
 }
Example #12
  @Override
  public void duplicate(Solver solver, THashMap<Object, Object> identitymap) {
    if (!identitymap.containsKey(this)) {
      int s1 = sets.length;
      SetVar[] set1 = new SetVar[s1];
      for (int i = 0; i < s1; i++) {
        sets[i].duplicate(solver, identitymap);
        set1[i] = (SetVar) identitymap.get(sets[i]);
      }

      int s2 = invsets.length;
      SetVar[] set2 = new SetVar[s2];
      for (int i = 0; i < s2; i++) {
        invsets[i].duplicate(solver, identitymap);
        set2[i] = (SetVar) identitymap.get(invsets[i]);
      }

      identitymap.put(this, new PropInverse(set1, set2, offSet1, offSet2));
    }
  }
Example #13
 /**
  * Build an unbounded {@link solver.variables.Variable} named {@code name}.
  *
  * @param name name of the variable
  * @param expression the expression defining the variable, or {@code null}
  * @param map the map from names to built objects
  * @param solver the target solver
  * @return {@link solver.variables.Variable}
  */
 private static IntVar buildWithInt(
     String name, Expression expression, THashMap<String, Object> map, Solver solver) {
   final IntVar iv;
   if (expression != null) {
     iv = buildOnExpression(DEBUG ? name : NO_NAME, expression, map, solver);
   } else {
     iv = VariableFactory.bounded(DEBUG ? name : NO_NAME, -999999, 999999, solver);
   }
   map.put(name, iv);
   return iv;
 }
Example #14
 /**
  * Build a {@link solver.variables.Variable} named {@code name}.
  *
  * @param name name of the boolean variable
  * @param expression the expression defining the variable, or {@code null}
  * @param map the map from names to built objects
  * @param solver the target solver
  * @return {@link solver.variables.Variable}
  */
 private static BoolVar buildWithBool(
     String name, Expression expression, THashMap<String, Object> map, Solver solver) {
   final BoolVar bi;
   if (expression != null) {
     bi = (BoolVar) buildOnExpression(DEBUG ? name : NO_NAME, expression, map, solver);
   } else {
     bi = VariableFactory.bool(DEBUG ? name : NO_NAME, solver);
   }
   map.put(name, bi);
   return bi;
 }
Example #15
  /**
   * Build an array of <? extends {@link solver.variables.Variable}>. <br/>
   * WARNING: array indices are from 1 to n.
   *
   * @param name name of the array of variables.<br/> Each variable is named like {@code name}_i.
   * @param type {@link parser.flatzinc.ast.declaration.DArray} object.
   * @param earr array of {@link parser.flatzinc.ast.expression.Expression}
   * @param map the map from names to built objects
   * @param solver the target solver
   */
  private static void buildWithDArray(
      String name,
      DArray type,
      Expression expression,
      EArray earr,
      THashMap<String, Object> map,
      Solver solver) {
    final DInt2 index = (DInt2) type.getIndex(0);
    // no need to get lowB, it is always 1 (see specification of FZN for more informations)
    final int size = index.getUpp();
    final Declaration what = type.getWhat();

    switch (what.typeOf) {
      case BOOL:
        final BoolVar[] bs = new BoolVar[size];
        for (int i = 0; i < size; i++) {
          bs[i] = earr.getWhat_i(i).boolVarValue(solver);
        }
        map.put(name, bs);
        break;
      case INT:
      case INT2:
      case INTN:
        final IntVar[] vs = new IntVar[size];
        for (int i = 0; i < size; i++) {
          vs[i] = earr.getWhat_i(i).intVarValue(solver);
        }
        map.put(name, vs);
        break;
      case SET:
        //                final SetVariable[] svs = new SetVariable[size];
        //                for (int i = 0; i < size; i++) {
        //                    svs[i] = earr.getWhat_i(i).setVarValue();
        //                }
        //                map.put(name, svs);
        Exit.log("SET VAR");
        break;
      default:
        break;
    }
  }
Example #16
 private static void readAnnotations(
     String name,
     Declaration type,
     List<EAnnotation> expressions,
     FZNLayout layout,
     THashMap<String, Object> map) {
   for (int i = 0; i < expressions.size(); i++) {
     Expression expression = expressions.get(i);
     Expression.EType etype = expression.getTypeOf();
     Annotation varanno = Annotation.none;
     switch (etype) {
       case IDE:
         EIdentifier identifier = (EIdentifier) expression;
         varanno = Annotation.valueOf((identifier).value);
         break;
       case ANN:
         EAnnotation eanno = (EAnnotation) expression;
         varanno = Annotation.valueOf(eanno.id.value);
         break;
       default:
         //                    LOGGER.warn("% Unknown annotation :" + type.toString());
     }
     switch (varanno) {
       case output_var:
         IntVar var = (IntVar) map.get(name);
         layout.addOutputVar(name, var, type);
         break;
       case output_array:
         EAnnotation eanno = (EAnnotation) expression;
         IntVar[] vars = (IntVar[]) map.get(name);
         layout.addOutputArrays(name, vars, eanno.exps, type);
         break;
       default:
         // LOGGER.warn("% Unknown annotation :" + varanno.toString());
     }
     break;
   }
 }
Example #17
 public void sendAlertToAllUsers(String Message) {
   try {
     ServerMessage Alert = new ServerMessage(Outconming.OldAlert);
     Alert.writeInt(1);
     Alert.writeUTF(Message);
     Iterator reader = getUserFromId.entrySet().iterator();
     while (reader.hasNext()) {
       User nUser = (User) (((Map.Entry) reader.next()).getValue());
       if (nUser.isOnline) Alert.Send(nUser.Session);
     }
   } catch (Exception e) {
     e.printStackTrace();
   }
 }
Example #18
 private static IntVar buildOnExpression(
     String name, Expression expression, THashMap<String, Object> map, Solver solver) {
   final IntVar iv;
   switch (expression.getTypeOf()) {
     case BOO:
       iv = expression.boolVarValue(solver);
       break;
     case INT:
       iv = VariableFactory.fixed(name, expression.intValue(), solver);
       break;
     case IDE:
       iv = VariableFactory.eq((IntVar) map.get(expression.toString()));
       break;
     case IDA:
       EIdArray eida = (EIdArray) expression;
       iv = ((IntVar[]) map.get(eida.name))[eida.index - 1];
       break;
     default:
       iv = null;
       Exit.log("Unknown expression");
   }
   return iv;
 }
Example #19
 public static List<User> searchHabbo(String SSO) {
   List<User> mUser = new ArrayList<User>();
   try {
     Iterator reader = getUserFromId.entrySet().iterator();
     while (reader.hasNext()) {
       User nUser = (User) (((Map.Entry) reader.next()).getValue());
       if (nUser.userName.toUpperCase().contains(SSO.toUpperCase())) {
         mUser.add(nUser);
       }
     }
   } catch (Exception ex) {
     // ex.printStackTrace();
     return mUser;
   }
   return mUser;
 }
Example #20
 /**
  * Build a {@link solver.variables.Variable} named {@code name}, defined by {@code type}. {@code
  * type} is expected to be a {@link parser.flatzinc.ast.declaration.DManyInt} object.
  *
  * @param name name of the variable
  * @param type {@link parser.flatzinc.ast.declaration.DManyInt} object.
  * @param expression the expression defining the variable, or {@code null}
  * @param map the map from names to built objects
  * @param solver the target solver
  * @return {@link solver.variables.Variable}
  */
 private static IntVar buildWithManyInt(
     String name,
     DManyInt type,
     Expression expression,
     THashMap<String, Object> map,
     Solver solver) {
   final IntVar iv;
   if (expression != null) {
     iv = buildOnExpression(DEBUG ? name : NO_NAME, expression, map, solver);
     int[] values = type.getValues();
     solver.post(IntConstraintFactory.member(iv, values));
   } else {
     iv = VariableFactory.enumerated(DEBUG ? name : NO_NAME, type.getValues(), solver);
   }
   map.put(name, iv);
   return iv;
 }
Example #21
 public static User getHabboFromName(String SSO) {
   User mResult;
   try {
     mResult = null;
     Iterator reader = getUserFromId.entrySet().iterator();
     while (reader.hasNext()) {
       User nUser = (User) (((Map.Entry) reader.next()).getValue());
       if (nUser.userName.equals(SSO)) {
         return nUser;
       }
     }
   } catch (Exception ex) {
     ex.printStackTrace();
     return null;
   }
   return mResult;
 }
Example #22
 /**
  * Build a {@link solver.variables.Variable} named {@code name}, defined by {@code type}.
  *
  * @param name name of the variable
  * @param type {@link parser.flatzinc.ast.declaration.DInt2} object
  * @param expression the expression defining the variable, or {@code null}
  * @param map the map from names to built objects
  * @param solver the target solver
  * @return {@link solver.variables.Variable}
  */
 private static IntVar buildWithInt2(
     String name, DInt2 type, Expression expression, THashMap<String, Object> map, Solver solver) {
   final IntVar iv;
   if (expression != null) {
     iv = buildOnExpression(DEBUG ? name : NO_NAME, expression, map, solver);
     int lb = type.getLow();
     int ub = type.getUpp();
     solver.post(IntConstraintFactory.member(iv, lb, ub));
   } else {
     int size = type.getUpp() - type.getLow() + 1;
     if (size < 256) {
       iv =
           VariableFactory.enumerated(
               DEBUG ? name : NO_NAME, type.getLow(), type.getUpp(), solver);
     } else {
       iv = VariableFactory.bounded(DEBUG ? name : NO_NAME, type.getLow(), type.getUpp(), solver);
     }
   }
   map.put(name, iv);
   return iv;
 }
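buildWithInt2 chooses an enumerated domain when the range spans fewer than 256 values and a bounded (interval) domain otherwise. The fragment below isolates just that heuristic, reusing the VariableFactory calls from the method above; the helper method itself is hypothetical and assumes the same imports as the surrounding examples.

 // Sketch of the domain-size heuristic from buildWithInt2 (hypothetical helper): enumerate
 // small domains, fall back to an interval domain for wide ranges.
 private static IntVar makeIntVar(String name, int lb, int ub, Solver solver) {
   int size = ub - lb + 1;
   return size < 256
       ? VariableFactory.enumerated(name, lb, ub, solver) // explicit domain, allows holes
       : VariableFactory.bounded(name, lb, ub, solver); // interval domain, cheaper for wide ranges
 }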
Example #23
  public static User getHabboFromSSO(String SSO) {
    User mResult = null;
    try {
      Iterator reader = getUserFromId.entrySet().iterator();

      while (reader.hasNext()) {
        User nUser = (User) (((Map.Entry) reader.next()).getValue());
        if (nUser.SSO.equals(SSO)) {
          return nUser;
        }
      }
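      // NOTE: concatenating SSO directly into the query string is open to SQL injection;
      // a parameterized query (PreparedStatement) would be safer here.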
      ResultSet mResult2 =
          Legion.getDetabase()
              .executeQuery("SELECT * FROM  `users` WHERE `ssoTicket`='" + SSO + "'");
      if (mResult2.next()) {
        User h = new User();
        h.Id = mResult2.getInt("id");
        h.userName = mResult2.getString("username");
        h.realName = mResult2.getString("realname");
        h.Look = mResult2.getString("look");
        h.SSO = mResult2.getString("ssoTicket");
        h.Email = mResult2.getString("email");
        h.Mission = mResult2.getString("mission");
        h.Coin = mResult2.getInt("coins");
        h.OtherCoin = mResult2.getInt("othercoins");
        h.Score = mResult2.getInt("score");
        h.Gender = mResult2.getString("Gender");
        h.Respects = mResult2.getInt("Respects");
        h.maxUserRespects = mResult2.getInt("Max_Users_Respects");
        h.maxPetsRespects = mResult2.getInt("Max_Pets_Respects");
        h.Rank = mResult2.getInt("rank");
        h.bannTime = mResult2.getInt("ban_time");
        h.banCount = mResult2.getInt("ban_count");
        h.habboClubInit = mResult2.getString("Habbo_Club_Init");
        h.habboClubExpire = mResult2.getString("Habbo_Club_Expire");
        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
        Date date = new Date();
        Date mDate = dateFormat.parse(h.habboClubExpire);
        h.Habbo_Club = (date.getTime() < mDate.getTime());
        h.isBanned = mResult2.getBoolean("ban");
        h.isKuick = mResult2.getBoolean("kuick");
        h.registredTime = mResult2.getString("registred_time");
        h.lastLogin = mResult2.getString("last_login");
        h.isOnline = false;
        h.isInRoom = false;
        h.isWriting = false;
        h.isWalking = false;
        h.x = 0;
        h.y = 0;
        h.z = 0.0;
        h.rot = 0;
        h.rothead = 0;
        if (!getUserFromId.containsKey(h.Id)) getUserFromId.put(h.Id, h);
        else {
          getUserFromId.remove(h.Id);
          getUserFromId.put(h.Id, h);
        }
        mResult = h;
      }
    } catch (Exception ex) {
      ex.printStackTrace();
      return null;
    }
    return mResult;
  }
Example #24
  /**
   * Reads in the raw data and saves the data needed in one CSV file.
   */
  public void findFlights() {

    ArrayList<String> foundCarriers = new ArrayList<String>();
    ArrayList<String> foundAirports = new ArrayList<String>();

    // Save folder
    try {
      BufferedWriter bw =
          new BufferedWriter(
              new FileWriter(new File(dirURL_segmentSave + File.separator + output + extension)));
      bw.write(
          "year,quarterID,itinID,marketID,origin,destination,airportGroup,tkChange,tkGroup,opGroup,tkCarrier,passengers,marketFare,marketDistance,nonstopDistance\n");

      // open all flight data files and extract.
      for (int j = startyear; j <= endyear; j++) {
        for (int i = 1; i <= 4; i++) {
          long time2 = System.currentTimeMillis();
          BufferedReader br =
              new BufferedReader(
                  new FileReader(
                      new File(
                          dirURL_segment
                              + File.separator
                              + fileSegment
                              + j
                              + "_"
                              + i
                              + extension)));
          System.out.println("scanning market " + j + " - " + i + " and saving it to the csv...");

          String itinID,
              marketID,
              airportGroup,
              origin,
              destination,
              tkChange,
              tkGroup,
              opGroup,
              passengers,
              marketFare,
              marketDistance,
              nonstopDistance,
              tkCarrier;

          String line = br.readLine();

          // StringTokenizer cannot handle quoted fields or empty columns, so a regex matcher is used.
          // Never change the number of capturing groups in this pattern (2)! :-)
          Pattern csvPattern = Pattern.compile("(\"[^\"]*\")|(?<=,|^)([^,]*)(?:,|$)");
          Matcher matcher = csvPattern.matcher(line);
          int numberOfTokensInEveryLine = 0;

          while (matcher.find()) {
            ++numberOfTokensInEveryLine;
          }

          line = br.readLine(); // Jump to second line to skip header.

          String match;
          String[] tokens;
          int counter;

          int k = numberOfLinesToReadTestMode;

          while (k != 0 && line != null) {
            --k;

            tokens = new String[numberOfTokensInEveryLine];
            counter = 0;
            matcher = csvPattern.matcher(line);
            while (matcher.find()) {
              match = matcher.group(1);
              if (match != null) {
                tokens[counter++] = match;
              } else {
                tokens[counter++] = matcher.group(2);
              }
            }

            // Itinerary ID.
            itinID = tokens[0];
            itinID = itinID.substring(1, itinID.length() - 1);
            if (debug) System.out.print("   " + itinID + " - ");

            // Market ID.
            marketID = tokens[1];
            marketID = marketID.substring(1, marketID.length() - 1);
            if (debug) System.out.print(marketID + " - ");

            // Origin.
            origin = tokens[5];
            origin = origin.substring(1, origin.length() - 1);
            if (debug) System.out.print(origin + " - ");

            // Destination.
            destination = tokens[13];
            destination = destination.substring(1, destination.length() - 1);
            if (debug) System.out.print(destination + " - ");

            airportGroup = tokens[21];
            airportGroup = airportGroup.substring(1, airportGroup.length() - 1);
            if (debug) System.out.print(airportGroup + " - ");

            // Ticketing Carrier Change.
            tkChange = tokens[23];
            tkChange = tkChange.substring(0, tkChange.length() - 3);
            if (debug) System.out.print(tkChange + " - ");

            // Ticketing Carrier Group.
            tkGroup = tokens[24];
            tkGroup = tkGroup.substring(1, tkGroup.length() - 1);
            if (debug) System.out.print(tkGroup + " - ");

            // Operating  Carrier Group.
            opGroup = tokens[26];
            opGroup = opGroup.substring(1, opGroup.length() - 1);
            if (debug) System.out.print(opGroup + " - ");

            // Ticketing Carrier, 99 if it changed.
            tkCarrier = tokens[28];
            tkCarrier = tkCarrier.substring(1, tkCarrier.length() - 1);
            if (debug) System.out.print(tkCarrier + " - ");

            // Passengers
            passengers = tokens[31];
            passengers = passengers.substring(0, passengers.length() - 3);
            if (debug) System.out.print(passengers + " - ");

            // MarketFare.
            marketFare = tokens[32];
            marketFare = marketFare.substring(0, marketFare.length() - 3);
            if (debug) System.out.print(marketFare + " - ");

            // MarketDistance.
            marketDistance = tokens[33];
            marketDistance = marketDistance.substring(0, marketDistance.length() - 3);
            if (debug) System.out.print(marketDistance + " - ");

            // NonstopDistance.
            nonstopDistance = tokens[36];
            nonstopDistance = nonstopDistance.substring(0, nonstopDistance.length() - 3);
            if (debug) System.out.print(nonstopDistance + "\n");

            // Check whether the carrier is already in the list; if not, add it.

            if (!foundCarriers.contains(tkCarrier)) {
              foundCarriers.add(tkCarrier);
            }

            // Check whether the origin and destination airports are already in the list; if not, add them.

            if (!foundAirports.contains(origin)) {
              foundAirports.add(origin);
            }

            if (!foundAirports.contains(destination)) {
              foundAirports.add(destination);
            }

            // Compute the running quarter number and set quarterID.
            int quarterInt = Integer.valueOf(i);
            int quarterID = (j - 1993) * 4 + quarterInt - 1;

            // Save the market data.

            bw.write(
                j
                    + ","
                    + quarterID
                    + ","
                    + itinID
                    + ","
                    + marketID
                    + ","
                    + origin
                    + ","
                    + destination
                    + ","
                    + airportGroup
                    + ","
                    + tkChange
                    + ","
                    + tkGroup
                    + ","
                    + opGroup
                    + ","
                    + tkCarrier
                    + ","
                    + passengers
                    + ","
                    + marketFare
                    + ","
                    + marketDistance
                    + ","
                    + nonstopDistance
                    + "\n");

            line = br.readLine();
          }

          br.close();

          System.out.println(
              "   ... took " + (System.currentTimeMillis() - time2) / 1000 + " seconds.");
        }
      }

      bw.close();

      // find and print all airports found in the data
      THashMap<String, String> airportGeoSet = findCoordinates();
      BufferedWriter bwgeo =
          new BufferedWriter(
              new FileWriter(
                  new File(dirURL_segmentSave + File.separator + output + "_Geo" + extension)));

      bwgeo.write("airport;latitude;longitude\n"); // BEWARE Fixed Header!!

      Collections.sort(foundAirports);

      String key = null, value;
      for (Iterator<String> keysIterator = foundAirports.iterator(); keysIterator.hasNext(); ) {
        key = keysIterator.next();
        value = airportGeoSet.get(key);
        // System.out.println(key);

        bwgeo.write(key + ";" + value + "\n");
      }
      bwgeo.close();
      System.out.println("saved airport geo data as csv");

    } catch (FileNotFoundException e) {
      e.printStackTrace();
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
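The parsing loop in findFlights() relies on a two-group regular expression instead of StringTokenizer: group 1 captures a quoted field (quotes included, which is why the code above strips them with substring), group 2 an unquoted field. The standalone sketch below exercises the same pattern; the class name and the sample line are made up for illustration.

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Standalone demo of the two-group CSV pattern used in findFlights(): group 1 matches a quoted
// field (quotes are kept, as in the code above), group 2 an unquoted field.
public class CsvPatternDemo {
  public static void main(String[] args) {
    Pattern csvPattern = Pattern.compile("(\"[^\"]*\")|(?<=,|^)([^,]*)(?:,|$)");
    String line = "\"2013101\",\"20131\",\"ABE\",307.00"; // made-up sample line
    List<String> tokens = new ArrayList<String>();
    Matcher matcher = csvPattern.matcher(line);
    while (matcher.find()) {
      String match = matcher.group(1);
      tokens.add(match != null ? match : matcher.group(2));
    }
    System.out.println(tokens); // ["2013101", "20131", "ABE", 307.00]
  }
}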
Example #25
 /**
  * Build an array of <? extends {@link solver.variables.Variable}>. </br>WARNING: array's indice
  * are from 1 to n.
  *
  * @param name name of the array of variables.</br> Each variable is named like {@code name}_i.
  * @param type {@link parser.flatzinc.ast.declaration.DArray} object.
  * @param map
  * @param solver
  */
 private static void buildWithDArray(
     String name,
     DArray type,
     Expression expression,
     THashMap<String, Object> map,
     Solver solver) {
   final DInt2 index = (DInt2) type.getIndex(0);
   // no need to get lowB, it is always 1 (see specification of FZN for more informations)
   final int size = index.getUpp();
   final Declaration what = type.getWhat();
   final IntVar[] vs;
   switch (what.typeOf) {
     case BOOL:
       BoolVar[] bs = new BoolVar[size];
       if (expression == null) {
         for (int i = 1; i <= size; i++) {
           bs[i - 1] = buildWithBool(name + '_' + i, expression, map, solver);
         }
       } else if (expression.getTypeOf().equals(Expression.EType.ARR)) {
         EArray array = (EArray) expression;
         // build the array
         for (int i = 0; i < size; i++) {
           bs[i] = array.getWhat_i(i).boolVarValue(solver);
         }
       }
       map.put(name, bs);
       break;
     case INT:
       vs = new IntVar[size];
       if (expression == null) {
         for (int i = 1; i <= size; i++) {
           vs[i - 1] = buildWithInt(name + '_' + i, null, map, solver);
         }
       } else if (expression.getTypeOf().equals(Expression.EType.ARR)) {
         buildFromIntArray(vs, (EArray) expression, size, solver);
       }
       map.put(name, vs);
       break;
     case INT2:
       vs = new IntVar[size];
       if (expression == null) {
         for (int i = 1; i <= size; i++) {
           vs[i - 1] = buildWithInt2(name + '_' + i, (DInt2) what, expression, map, solver);
         }
       } else if (expression.getTypeOf().equals(Expression.EType.ARR)) {
         buildFromIntArray(vs, (EArray) expression, size, solver);
       }
       map.put(name, vs);
       break;
     case INTN:
       vs = new IntVar[size];
       if (expression == null) {
         for (int i = 1; i <= size; i++) {
           vs[i - 1] = buildWithManyInt(name + '_' + i, (DManyInt) what, expression, map, solver);
         }
       } else if (expression.getTypeOf().equals(Expression.EType.ARR)) {
         buildFromIntArray(vs, (EArray) expression, size, solver);
       }
       map.put(name, vs);
       break;
     case SET:
       //                final SetVariable[] svs = new SetVariable[size];
       //                for (int i = 1; i <= size; i++) {
       //                    svs[i - 1] = buildWithSet(name + '_' + i, (DSet) what, map);
       //                }
       //                map.put(name, svs);
       Exit.log("SET VAR");
       break;
     default:
       break;
   }
 }
Example #26
  public final void compareOverlapAndZScoreDirectionTwoEQTLFiles(
      String eQTL,
      String meQTL,
      String eQTMFile,
      String outputFile,
      boolean matchOnGeneName,
      double fdrCutt,
      boolean matchSnpOnPos,
      boolean splitGeneNames,
      boolean flipUsingEQTM,
      boolean topeffect)
      throws IOException, Exception {
    System.out.println("Performing comparison of eQTLs and meQTLs");
    double filterOnFDR =
        fdrCutt; // Do we want to use another FDR measure? When set to -1 this is not used at all.

    // We can exclude some eQTLs from the analysis. If requested, put the entire eQTL string in
    // this set for each eQTL. Does not work in combination with matching based on chr and pos.
    HashSet<String> hashExcludeEQTLs = new HashSet<String>();
    // We can confine the analysis to only a subset of probes. If requested, put the probe name
    // in this set.
    HashSet<String> hashConfineAnalysisToSubsetOfProbes = new HashSet<String>();
    // We can confine the analysis to only those eQTLs for which the SNP successfully passed QC,
    // otherwise unfair comparisons are sometimes made. If requested, put the SNP name in this
    // set.
    HashSet<String> hashTestedSNPsThatPassedQC = null;

    // Load the eQTM File
    QTLTextFile eQTLsTextFile = new QTLTextFile(eQTMFile, QTLTextFile.R);

    HashMap<String, ArrayList<EQTL>> eQtmInfo = new HashMap<String, ArrayList<EQTL>>();

    for (Iterator<EQTL> eQtlIt = eQTLsTextFile.getEQtlIterator(); eQtlIt.hasNext(); ) {
      EQTL eQtm = eQtlIt.next();
      String eQtmKey = eQtm.getRsName();

      if (!eQtm.getAlleleAssessed().equals("C")) {
        eQtm.setAlleleAssessed("C");
        eQtm.setZscore(eQtm.getZscore() * -1);

        Double[] zscores = eQtm.getDatasetZScores();
        Double[] correlation = eQtm.getCorrelations();
        for (int i = 0; i < eQtm.getDatasets().length; ++i) {
          zscores[i] *= -1;
          correlation[i] *= -1;
        }
        eQtm.setDatasetZScores(zscores);
        eQtm.setCorrelations(correlation);
      }

      ArrayList<EQTL> posEqtls = eQtmInfo.get(eQtmKey);

      if (posEqtls == null) {
        posEqtls = new ArrayList<EQTL>(1);
        posEqtls.add(eQtm);
        eQtmInfo.put(eQtmKey, posEqtls);
      } else if (!topeffect) {
        eQtmInfo.get(eQtmKey).add(eQtm);
      }
    }

    System.out.println("eQTMs read in: " + eQtmInfo.size());

    // Now load the eQTLs for file 1:
    THashMap<String, String[]> hashEQTLs = new THashMap<String, String[]>();
    THashSet<String> hashUniqueProbes = new THashSet<String>();
    THashSet<String> hashUniqueGenes = new THashSet<String>();

    TextFile in = new TextFile(eQTL, TextFile.R);
    in.readLine();
    String[] data = in.readLineElemsReturnReference(SPLIT_ON_TAB);

    if (data.length < 5) {
      throw new IllegalStateException(
          "QTL File does not have enough columns. Detected columns: "
              + data.length
              + " in file "
              + in.getFileName());
    }

    while (data != null) {
      if (filterOnFDR == -1 || Double.parseDouble(data[18]) <= filterOnFDR) {
        if (hashConfineAnalysisToSubsetOfProbes.isEmpty()
            || hashConfineAnalysisToSubsetOfProbes.contains(data[4])) {
          if (matchOnGeneName) {
            if (data[16].length() > 1) {

              if (splitGeneNames) {
                for (String gene : SEMI_COLON_PATTERN.split(data[16])) {

                  hashEQTLs.put(
                      (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + gene, data);
                  hashUniqueProbes.add(data[4]);
                  hashUniqueGenes.add(gene);
                }
              } else {

                if (!hashExcludeEQTLs.contains(data[1] + "\t" + data[16])) {
                  hashEQTLs.put(
                      (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + data[16], data);
                  hashUniqueProbes.add(data[4]);
                  hashUniqueGenes.add(data[16]);
                  // log.write("Added eQTL from original file " + (matchSnpOnPos ? data[2] + ":" +
                  // data[3] : data[1]) + "\t" + data[16]);
                }
              }
            }
          } else {
            if (!hashExcludeEQTLs.contains(data[1] + "\t" + data[4])) {
              hashEQTLs.put(
                  (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + data[4], data);
              hashUniqueProbes.add(data[4]);
              hashUniqueGenes.add(data[16]);
              //	log.write("Added eQTL from original file " + (matchSnpOnPos ? data[2] + ":" +
              // data[3] : data[1]) + "\t" + data[4]);
            }
          }
        }
      }
      // Advance to the next line even when the current line fails the FDR filter,
      // otherwise this loop would never terminate.
      data = in.readLineElemsReturnReference(SPLIT_ON_TAB);
    }
    in.close();

    int nrUniqueProbes = hashUniqueProbes.size();
    int nrUniqueGenes = hashUniqueGenes.size();
    hashUniqueProbes = null;
    hashUniqueGenes = null;

    // Initialize Graphics2D for the Z-Score allelic direction comparison:
    //        int width = 1000;
    //        int height = 1000;
    //        int margin = 100;
    //        int x0 = margin;
    //        int x1 = width - margin;
    //        int y0 = margin;
    //        int y1 = height - margin;

    ZScorePlot zs = new ZScorePlot();
    String zsOutFileName = outputFile + "-ZScoreComparison.pdf";
    zs.init(2, new String[] {"eQTLs", "meQTLs"}, true, zsOutFileName);

    // Variables holding variousStatistics:
    int nreQTLsIdenticalDirection = 0;
    int nreQTLsOppositeDirection = 0;
    HashMap<String, Integer> hashEQTLNrTimesAssessed = new HashMap<String, Integer>();

    THashSet<String> hashEQTLs2 = new THashSet<String>();
    THashSet<String> hashUniqueProbes2 = new THashSet<String>();
    THashSet<String> hashUniqueGenes2 = new THashSet<String>();
    THashSet<String> hashUniqueProbesOverlap = new THashSet<String>();
    THashSet<String> hashUniqueGenesOverlap = new THashSet<String>();

    int counterFile2 = 0;
    int overlap = 0;
    ArrayDoubleList vecX = new ArrayDoubleList();
    ArrayDoubleList vecY = new ArrayDoubleList();

    // Vector holding all opposite allelic effects:
    //        LinkedHashSet<String> vecOppositeEQTLs = new LinkedHashSet<String>();

    // Now process file 2:
    in = new TextFile(meQTL, TextFile.R);
    in.readLine();

    int skippedDueToMapping = 0;
    data = null;
    TextFile identicalOut =
        new TextFile(outputFile + "-eQTLsWithIdenticalDirecton.txt.gz", TextFile.W);
    TextFile disconcordantOut = new TextFile(outputFile + "-OppositeEQTLs.txt", TextFile.W);
    TextFile log = new TextFile(outputFile + "-eQTL-meQTL-ComparisonLog.txt", TextFile.W);
    TextFile log2 = new TextFile(outputFile + "-eQTM-missingnessLog.txt", TextFile.W);

    THashSet<String> identifiersUsed = new THashSet<String>();

    while ((data = in.readLineElemsReturnReference(SPLIT_ON_TAB)) != null) {

      if (filterOnFDR == -1 || Double.parseDouble(data[18]) <= filterOnFDR) {
        if (!eQtmInfo.containsKey(data[4])) {
          skippedDueToMapping++;
          log2.write(
              "meQTL probe not present In eQTM file:\t"
                  + data[4]
                  + ", effect statistics: \t"
                  + data[0]
                  + "\t"
                  + data[2]
                  + "\t"
                  + data[3]
                  + "\t"
                  + data[16]
                  + "\n");
          continue;
        }

        String orgDataFour = data[4];

        for (int i = 0; i < eQtmInfo.get(orgDataFour).size(); ++i) {
          if (topeffect && i > 0) {
            break;
          }
          data[16] = eQtmInfo.get(orgDataFour).get(i).getProbeHUGO();
          data[4] = eQtmInfo.get(orgDataFour).get(i).getProbe();

          if (flipUsingEQTM) {
            Double zScoreQTM = eQtmInfo.get(orgDataFour).get(i).getZscore();
            if (zScoreQTM < 0) {
              data[10] = String.valueOf(Double.parseDouble(data[10]) * -1);
            }
          }

          if (hashConfineAnalysisToSubsetOfProbes.isEmpty()
              || hashConfineAnalysisToSubsetOfProbes.contains(data[4])) {
            if (matchOnGeneName) {
              if (!hashExcludeEQTLs.contains(data[1] + "\t" + data[16])) {
                if (data[16].length() > 1) {

                  if (splitGeneNames) {
                    for (String gene : SEMI_COLON_PATTERN.split(data[16])) {

                      hashUniqueProbes2.add(data[4]);
                      hashUniqueGenes2.add(gene);
                      if (!hashEQTLs2.contains(
                          (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + gene)) {
                        hashEQTLs2.add(
                            (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + gene);
                        counterFile2++;
                      }
                    }
                  } else {

                    hashUniqueProbes2.add(data[4]);
                    hashUniqueGenes2.add(data[16]);
                    if (!hashEQTLs2.contains(
                        (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + data[16])) {
                      hashEQTLs2.add(
                          (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + data[16]);
                      counterFile2++;
                    }
                  }
                }
              }
            } else {
              if (!hashExcludeEQTLs.contains(data[1] + "\t" + data[4])) {
                // hashEQTLs2.put(data[1] + "\t" + data[4], str);
                hashUniqueProbes2.add(data[4]);
                hashUniqueGenes2.add(data[16]);
                counterFile2++;
              }
            }
          }
          String[] QTL = null;
          String identifier = null;
          if (matchOnGeneName) {

            if (data.length > 16 && data[16].length() > 1) {
              if (splitGeneNames) {
                // NB Plotting and processing of all QTLs here is not okay!
                for (String gene : SEMI_COLON_PATTERN.split(data[16])) {
                  if (!hashExcludeEQTLs.contains(data[1] + "\t" + gene)) {
                    identifier = (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + gene;
                    if (hashEQTLs.containsKey(identifier)) {
                      QTL = hashEQTLs.get(identifier);
                    }
                  }
                }
              } else {
                if (!hashExcludeEQTLs.contains(data[1] + "\t" + data[16])) {
                  identifier =
                      (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + data[16];
                  if (hashEQTLs.containsKey(identifier)) {
                    QTL = hashEQTLs.get(identifier);
                  }
                }
              }
            }
          } else {
            if (!hashExcludeEQTLs.contains(data[1] + "\t" + data[4])) {
              identifier = (matchSnpOnPos ? data[2] + ":" + data[3] : data[1]) + "\t" + data[4];
              if (hashEQTLs.containsKey(identifier)) {
                QTL = hashEQTLs.get(identifier);
              }
            }
          }

          if (QTL == null) {

            // The eQTL, present in file 2 is not present in file 1:
            // if (Double.parseDouble(data[0]); < 1E-4) {
            if (hashTestedSNPsThatPassedQC == null
                || hashTestedSNPsThatPassedQC.contains(data[1])) {
              log.write(
                  "eQTL Present In New file But Not In Original File:\t"
                      + identifier
                      + "\t"
                      + data[0]
                      + "\t"
                      + data[2]
                      + "\t"
                      + data[3]
                      + "\t"
                      + data[16]
                      + "\n");
            }
            // }
            double zScore2 = Double.parseDouble(data[10]);
            //                        int posX = 500 + (int) 0;
            //                        int posY = 500 - (int) Math.round(zScore2 * 10);
            zs.draw(null, zScore2, 0, 1);

          } else {
            identifiersUsed.add(identifier);
            String[] eQtlData = QTL;
            boolean identicalProbe = true;
            String probe = data[4];
            String probeFound = eQtlData[4];
            if (!probe.equals(probeFound)) {
              identicalProbe = false;
            }

            hashUniqueProbesOverlap.add(data[4]);
            hashUniqueGenesOverlap.add(data[16]);
            if (!hashEQTLNrTimesAssessed.containsKey(identifier)) {
              hashEQTLNrTimesAssessed.put(identifier, 1);
            } else {
              hashEQTLNrTimesAssessed.put(identifier, 1 + hashEQTLNrTimesAssessed.get(identifier));
            }
            String alleles = eQtlData[8];
            String alleleAssessed = eQtlData[9];

            String[] correlations = eQtlData[17].split(";");
            double correlation = 0;
            int numCorr1 = 0;
            for (int c = 0; c < correlations.length; c++) {
              try {
                if (!correlations[c].equals("-")) {
                  correlation += Double.parseDouble(correlations[c]);
                  numCorr1++;
                }
              } catch (Exception e) {
              }
            }

            correlation /= (double) numCorr1;
            //                       if(numCorr1 == 0){
            //                           System.out.println("Warning: no correlations defined for
            // eqtl file 1");
            //                       }
            double zScore = Double.parseDouble(eQtlData[10]);
            //                        double pValue = Double.parseDouble(eQtlData[0]);
            String alleles2 = data[8];
            String alleleAssessed2 = data[9];
            double zScore2 = Double.parseDouble(data[10]);

            //                        double pValue2 = Double.parseDouble(data[0]);
            String[] correlations2 = data[17].split(";");
            double correlation2 = 0;

            boolean alleleflipped = false;
            if (!alleleAssessed.equals(data[9])) {
              if (data[9].equals(eQtlData[8].split("/")[0])) {
                alleleflipped = true;
              } else {
                //                               System.out.println("WTF BBQ!");
              }
            }

            int numCorr2 = 0;
            for (int c = 0; c < correlations2.length; c++) {
              try {
                if (!correlations2[c].equals("-")) {

                  correlation2 += (Double.parseDouble(correlations2[c]));

                  numCorr2++;
                }
              } catch (NumberFormatException e) {
              }
            }
            //                       if(numCorr2 == 0){
            //                           System.out.println("Warning: no correlations defined for
            // eqtl file 2");
            //                       }
            correlation2 /= (double) numCorr2;
            if (alleleflipped) {
              correlation2 = -correlation2;
            }
            boolean sameDirection = false;
            int nrIdenticalAlleles = 0;
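            // Count alleles shared between the two "X/Y" strings; index 1 is the '/' separator,
            // so only positions 0 and 2 are compared. If no allele matches, the second SNP is
            // assumed to be on the opposite strand and is complemented below before recounting.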
            if (alleles.length() > 2 && alleles2.length() > 2) {
              for (int a = 0; a < 3; a++) {
                for (int b = 0; b < 3; b++) {
                  if (a != 1 && b != 1) {
                    if (alleles.getBytes()[a] == alleles2.getBytes()[b]) {
                      nrIdenticalAlleles++;
                    }
                  }
                }
              }
            }

            if (nrIdenticalAlleles == 0) {
              alleles2 =
                  (char) BaseAnnot.getComplement((byte) alleles2.charAt(0))
                      + "/"
                      + (char) BaseAnnot.getComplement((byte) alleles2.charAt(2));
              alleleAssessed2 = BaseAnnot.getComplement(alleleAssessed2);
              if (alleles.length() > 2 && alleles2.length() > 2) {
                for (int a = 0; a < 3; a++) {
                  for (int b = 0; b < 3; b++) {
                    if (a != 1 && b != 1) {
                      if (alleles.getBytes()[a] == alleles2.getBytes()[b]) {
                        nrIdenticalAlleles++;
                      }
                    }
                  }
                }
              }
            }

            if (nrIdenticalAlleles != 2) {
              log.write(
                  "Error! SNPs have incompatible alleles!!:\t"
                      + alleles
                      + "\t"
                      + alleles2
                      + "\t"
                      + identifier
                      + "\n");
            } else {
              overlap++;
              if (!alleleAssessed.equals(alleleAssessed2)) {
                zScore2 = -zScore2;
                //                           correlation2 = -correlation2;
                alleleAssessed2 = alleleAssessed;
              }

              // Recode alleles:
              // if contains T, but no A, take complement
              //                        if (alleles.contains("T") && !alleles.contains("A")) {
              //                            alleles = BaseAnnot.getComplement(alleles);
              //                            alleleAssessed =
              // BaseAnnot.getComplement(alleleAssessed);
              //                            alleleAssessed2 =
              // BaseAnnot.getComplement(alleleAssessed2);
              //                        }
              if (zScore2 * zScore > 0) {
                sameDirection = true;
              }

              //                       if(correlation != correlation2 && (numCorr1 > 0 && numCorr2 >
              // 0)){
              //                           if(Math.abs(correlation - correlation2) > 0.00001){
              //                               System.out.println("Correlations are different:
              // "+lineno+"\t"+correlation +"\t"+correlation2+"\t"+str);
              //                           }
              //
              //                       }
              zs.draw(zScore, zScore2, 0, 1);
              if (!sameDirection) {
                nreQTLsOppositeDirection++;

                if (matchOnGeneName) {
                  disconcordantOut.append(
                      data[1]
                          + '\t'
                          + data[16]
                          + '\t'
                          + alleles
                          + '\t'
                          + alleleAssessed
                          + '\t'
                          + zScore
                          + '\t'
                          + alleles2
                          + '\t'
                          + alleleAssessed2
                          + '\t'
                          + zScore2);

                } else {
                  disconcordantOut.append(
                      data[1]
                          + '\t'
                          + data[4]
                          + '\t'
                          + alleles
                          + '\t'
                          + alleleAssessed
                          + '\t'
                          + zScore
                          + '\t'
                          + alleles2
                          + '\t'
                          + alleleAssessed2
                          + '\t'
                          + zScore2);
                }

                //                            int posX = 500 + (int) Math.round(zScore * 10);
                //                            int posY = 500 - (int) Math.round(zScore2 * 10);
                vecX.add(zScore);
                vecY.add(zScore2);

              } else {
                // write to output
                identicalOut.writeln(
                    identifier
                        + '\t'
                        + alleles
                        + '\t'
                        + alleleAssessed
                        + '\t'
                        + zScore
                        + '\t'
                        + alleles2
                        + '\t'
                        + alleleAssessed2
                        + '\t'
                        + zScore2);
                nreQTLsIdenticalDirection++;
                if (alleles.length() > 2
                    && !alleles.equals("A/T")
                    && !alleles.equals("T/A")
                    && !alleles.equals("C/G")
                    && !alleles.equals("G/C")) {
                  //                                int posX = 500 + (int) Math.round(zScore * 10);
                  //                                int posY = 500 - (int) Math.round(zScore2 * 10);
                  vecX.add(zScore);
                  vecY.add(zScore2);
                }
              }
            }
          }
        }
      }
    }
    identicalOut.close();
    disconcordantOut.close();
    in.close();
    log2.close();

    log.write(
        "\n/// Writing missing QTLs observed in original file but not in the new file ////\n\n");
    for (Entry<String, String[]> QTL : hashEQTLs.entrySet()) {
      if (!identifiersUsed.contains(QTL.getKey())) {
        // The eQTL, present in file 1 is not present in file 2:

        // if (Double.parseDouble(QTL.getValue()[0]) < 1E-4) {
        if (hashTestedSNPsThatPassedQC == null || hashTestedSNPsThatPassedQC.contains(data[1])) {
          log.write(
              "eQTL Present In Original file But Not In New File:\t"
                  + QTL.getKey()
                  + "\t"
                  + QTL.getValue()[0]
                  + "\t"
                  + QTL.getValue()[2]
                  + "\t"
                  + QTL.getValue()[3]
                  + "\t"
                  + QTL.getValue()[16]
                  + "\n");
        }
        // }
        double zScore = Double.parseDouble(QTL.getValue()[10]);
        //                int posX = 500 + (int) 0;
        //                int posY = 500 - (int) Math.round(zScore * 10);
        zs.draw(zScore, null, 0, 1);
      }
    }

    log.close();
    zs.write(zsOutFileName);

    double[] valsX = vecX.toArray();
    double[] valsY = vecY.toArray();

    if (valsX.length > 2) {
      double correlation = JSci.maths.ArrayMath.correlation(valsX, valsY);
      double r2 = correlation * correlation;

      cern.jet.random.tdouble.engine.DoubleRandomEngine randomEngine =
          new cern.jet.random.tdouble.engine.DRand();
      cern.jet.random.tdouble.StudentT tDistColt =
          new cern.jet.random.tdouble.StudentT(valsX.length - 2, randomEngine);
      double pValuePearson = 1;
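      // t statistic for a Pearson correlation: t = r * sqrt((n - 2) / (1 - r^2)), compared against
      // a Student t distribution with n - 2 degrees of freedom; the p-value is made two-sided below.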
      double tValue = correlation / (Math.sqrt((1 - r2) / (double) (valsX.length - 2)));
      if (tValue < 0) {
        pValuePearson = tDistColt.cdf(tValue);
      } else {
        pValuePearson = tDistColt.cdf(-tValue);
      }
      pValuePearson *= 2;
      System.out.println(
          "\nCorrelation between the Z-Scores of the overlapping set of eQTLs:\t"
              + correlation
              + "\tP-Value:\t"
              + pValuePearson);
    }

    TextFile outSummary = new TextFile(outputFile + "-Summary.txt", TextFile.W);

    System.out.println("");
    System.out.println(
        "Nr of eQTLs:\t"
            + hashEQTLs.size()
            + "\tin file:\t"
            + eQTL
            + "\tNrUniqueProbes:\t"
            + nrUniqueProbes
            + "\tNrUniqueGenes:\t"
            + nrUniqueGenes);
    outSummary.writeln(
        "Nr of eQTLs:\t"
            + hashEQTLs.size()
            + "\tin file:\t"
            + eQTL
            + "\tNrUniqueProbes:\t"
            + nrUniqueProbes
            + "\tNrUniqueGenes:\t"
            + nrUniqueGenes);

    System.out.println(
        "Nr of meQTLs:\t"
            + counterFile2
            + "\tin file:\t"
            + meQTL
            + "\tNrUniqueProbes:\t"
            + hashUniqueProbes2.size()
            + "\tNrUniqueGenes:\t"
            + hashUniqueGenes2.size()
            + " *With eQTM mapping.");
    outSummary.writeln(
        "Nr of meQTLs:\t"
            + counterFile2
            + "\tin file:\t"
            + meQTL
            + "\tNrUniqueProbes:\t"
            + hashUniqueProbes2.size()
            + "\tNrUniqueGenes:\t"
            + hashUniqueGenes2.size()
            + " *With eQTM mapping.");

    System.out.println("Skipped over meQTLs:\t" + skippedDueToMapping);
    outSummary.writeln("Skipped over meQTLs:\t" + skippedDueToMapping);

    System.out.println(
        "Overlap:\t"
            + overlap
            + "\tNrUniqueProbesOverlap:\t"
            + hashUniqueProbesOverlap.size()
            + "\tNrUniqueGenesOverlap:\t"
            + hashUniqueGenesOverlap.size());
    outSummary.writeln(
        "Overlap:\t"
            + overlap
            + "\tNrUniqueProbesOverlap:\t"
            + hashUniqueProbesOverlap.size()
            + "\tNrUniqueGenesOverlap:\t"
            + hashUniqueGenesOverlap.size());

    System.out.println("");
    outSummary.writeln();

    System.out.println("Nr eQTLs with identical direction:\t" + nreQTLsIdenticalDirection);
    outSummary.writeln("Nr eQTLs with identical direction:\t" + nreQTLsIdenticalDirection);

    double proportionOppositeDirection =
        100d
            * (double) nreQTLsOppositeDirection
            / (double) (nreQTLsOppositeDirection + nreQTLsIdenticalDirection);
    String proportionOppositeDirectionString =
        (new java.text.DecimalFormat(
                "0.00;-0.00", new java.text.DecimalFormatSymbols(java.util.Locale.US)))
            .format(proportionOppositeDirection);

    System.out.println(
        "Nr eQTLs with opposite direction:\t"
            + nreQTLsOppositeDirection
            + "\t("
            + proportionOppositeDirectionString
            + "%)");
    outSummary.writeln(
        "Nr eQTLs with opposite direction:\t"
            + nreQTLsOppositeDirection
            + "\t("
            + proportionOppositeDirectionString
            + "%)");

    outSummary.close();

    nrShared = hashUniqueProbesOverlap.size();
    nrOpposite = nreQTLsOppositeDirection;
  }
Example #27
 @Override
 public int size() {
   return branches.size();
 }