/** Displays the labels and the values for the panel. */
    protected void displayPnlFields(HashMap hmPnl) {
      // If the map is null or empty, create an empty set of textfields and return.
      if (hmPnl == null || hmPnl.isEmpty()) {
        displayNewTxf("", "");
        return;
      }

      Iterator keySetItr = hmPnl.keySet().iterator();
      String strLabel = "";
      String strValue = "";

      // Hide the container while the fields are rebuilt, to avoid flicker.
      Container container = getParent();
      if (container != null) container.setVisible(false);
      try {
        // Get each label/value pair and display it.
        while (keySetItr.hasNext()) {
          strLabel = (String) keySetItr.next();
          strValue = (String) hmPnl.get(strLabel);

          displayNewTxf(strLabel, strValue);
        }

        revalidate();
        repaint();
      } catch (Exception e) {
        Messages.writeStackTrace(e);
        Messages.postDebug(e.toString());
      } finally {
        // Always restore visibility, even if an exception was thrown.
        if (container != null) container.setVisible(true);
      }
    }
Example 2
  /** Extracts document and cluster lists before serialization. */
  @Persist
  private void beforeSerialization() {
    /*
     * See http://issues.carrot2.org/browse/CARROT-693; this monitor does not save us
     * in a multi-threaded environment anyway. A better solution would be to prepare
     * this eagerly in the constructor, but we try to balance overhead and full
     * correctness here.
     */
    synchronized (this) {
      query = (String) attributes.get(AttributeNames.QUERY);

      if (getDocuments() != null) {
        documents = Lists.newArrayList(getDocuments());
      } else {
        documents = null;
      }

      if (getClusters() != null) {
        clusters = Lists.newArrayList(getClusters());
      } else {
        clusters = null;
      }

      otherAttributesForSerialization = MapUtils.asHashMap(SimpleXmlWrappers.wrap(attributes));
      otherAttributesForSerialization.remove(AttributeNames.QUERY);
      otherAttributesForSerialization.remove(AttributeNames.CLUSTERS);
      otherAttributesForSerialization.remove(AttributeNames.DOCUMENTS);
      if (otherAttributesForSerialization.isEmpty()) {
        otherAttributesForSerialization = null;
      }
    }
  }
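
The @Persist hook above is invoked by the Simple XML framework immediately before an object is written out, so the document and cluster lists are materialized only when serialization actually happens. Below is a minimal, self-contained sketch of that callback mechanism; the PersistCallbackDemo class and its field are hypothetical stand-ins, not part of the example above.

  import org.simpleframework.xml.Element;
  import org.simpleframework.xml.Root;
  import org.simpleframework.xml.core.Persist;
  import org.simpleframework.xml.core.Persister;

  import java.io.StringWriter;

  @Root
  public class PersistCallbackDemo {
    @Element private String snapshot = "initial";

    @Persist
    private void beforeSerialization() {
      // Runs just before the object is serialized, like the method in the example above.
      snapshot = "prepared at " + System.currentTimeMillis();
    }

    public static void main(String[] args) throws Exception {
      StringWriter out = new StringWriter();
      // Persister.write() triggers the @Persist callback before producing the XML.
      new Persister().write(new PersistCallbackDemo(), out);
      System.out.println(out);  // the serialized XML contains the refreshed snapshot value
    }
  }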
Example 3
 protected void pruneUploads() {
   if (uploads == null || uploads.isEmpty()) return;
   for (Iterator it = uploads.values().iterator(); it.hasNext(); ) {
     UploadStatus status = (UploadStatus) it.next();
     if (status.isDisposable()) {
       it.remove();
     }
   }
 }
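
On Java 8+ the same pruning can be expressed with generics and Collection.removeIf. The following is a minimal, self-contained sketch under that assumption; the simplified UploadStatus class here is a stand-in for the real one.

  import java.util.HashMap;
  import java.util.Map;

  public class PruneSketch {
    static class UploadStatus {
      private final boolean disposable;
      UploadStatus(boolean disposable) { this.disposable = disposable; }
      boolean isDisposable() { return disposable; }
    }

    public static void main(String[] args) {
      Map<String, UploadStatus> uploads = new HashMap<>();
      uploads.put("a", new UploadStatus(true));
      uploads.put("b", new UploadStatus(false));

      // Equivalent to the explicit Iterator loop above: drop every disposable entry.
      uploads.values().removeIf(UploadStatus::isDisposable);

      System.out.println(uploads.keySet());  // prints [b]
    }
  }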
Example 4
 private static void writeDuplicates() {
   Main.status("Writing duplicates.");
   if (!dup_post_list.isEmpty()) {
     Main.status(String.format("%s\t%s", "older_post", "newer_post"));
     for (Post post : dup_post_list.keySet()) {
       Main.status(String.format("%s\t%s", post.post_id, dup_post_list.get(post).post_id));
     }
   } else {
     Main.status("There are no duplicates.");
   }
   Main.status("Writing duplicates done.");
 }
  /**
   * 5/5/14, JTC: added persistent species data for players; the system parameter
   * masterSpeciesList replaces mSpecies. Gets previous-timestep biomass for all species from
   * the web service.
   */
  public HashMap<Integer, SpeciesZoneType> getPrediction(
      String networkOrManipulationId,
      int startTimestep,
      int runTimestep,
      Map<Integer, Integer> addSpeciesNodeList,
      ZoneNodes zoneNodes)
      throws SimulationException {
    long milliseconds = System.currentTimeMillis();

    Log.printf("\nPrediction at %d\n", startTimestep);

    // Get previous timestep biomass for all species from web service
    // JTC, use new HashMap containing all current settings from zoneNodes, masterSpeciesList
    // HJR: copy the map into a new HashMap here; iterating the live zoneNodes map was yielding a null.
    HashMap<Integer, SpeciesZoneType> masterSpeciesList =
        new HashMap<Integer, SpeciesZoneType>(zoneNodes.getNodes());

    HashMap<Integer, SpeciesZoneType> mNewSpecies = new HashMap<Integer, SpeciesZoneType>();
    // JTC, mUpdateBiomass renamed from mUpdateSpecies
    HashMap<Integer, SpeciesZoneType> mUpdateBiomass = new HashMap<Integer, SpeciesZoneType>();
    // JTC, added new update type, mUpdateParams
    HashMap<Integer, SpeciesZoneType> mUpdateParams = new HashMap<Integer, SpeciesZoneType>();

    SpeciesZoneType szt;
    String nodeConfig = null;
    SimJob job = new SimJob();
    // Example addSpeciesNodeList contents: {70=2494, 5=2000, 42=240, 14=1752, 31=1415}
    for (int node_id : addSpeciesNodeList.keySet()) {
      int addedBiomass = addSpeciesNodeList.get(node_id);

      if (!masterSpeciesList.containsKey(node_id)) {
        szt = createSpeciesZoneType(node_id, addedBiomass);
        mNewSpecies.put(node_id, szt);
        // jtc - 04/19/15
        masterSpeciesList.put(node_id, szt);
      } else {
        szt = masterSpeciesList.get(node_id);

        szt.setCurrentBiomass(Math.max(0, szt.getCurrentBiomass() + addedBiomass));
        szt.setBiomassUpdated(true);
      }
    }

    //      //JTC, separated this to capture biomass updates made to ZoneNodes that
    //      //are not received through addSpeciesNodeList (biomass and param updates)
    //      for (SpeciesZoneType species : masterSpeciesList.values()) {
    //          //param update also updates biomass, so insert into that list
    //          //preferentially; o/w use biomass update list
    //          if (species.paramUpdated) {
    //              mUpdateParams.put(species.getNodeIndex(), species);
    //              species.setParamUpdated(false);
    //          } else if (species.biomassUpdated) {
    //              mUpdateBiomass.put(species.getNodeIndex(), species);
    //              species.setBiomassUpdated(false);
    //          }
    //      }

    // Insert new species using web services
    if (!mNewSpecies.isEmpty()) {
      zoneNodes.addNodes(mNewSpecies);
    }
    try {
      nodeConfig =
          addMultipleSpeciesType(
              mNewSpecies, masterSpeciesList, startTimestep, false, networkOrManipulationId);
    } catch (Exception ex) {
      Log.println_e(ex.getMessage());
    }
    //      // Update biomass changes to existing species using web services
    //      if (!mUpdateBiomass.isEmpty()) {
    //          List<NodeBiomass> lNodeBiomass = new ArrayList<NodeBiomass>();
    //          for (SpeciesZoneType s : mUpdateBiomass.values()) {
    //              Log.printf("Updating Biomass: [%d] %s %f\n", s.getNodeIndex(), s.getName(),
    //                      s.getCurrentBiomass() / Constants.BIOMASS_SCALE);
    //              lNodeBiomass.add(new NodeBiomass(
    //                      s.getCurrentBiomass() / Constants.BIOMASS_SCALE, s.getNodeIndex()));
    //          }
    //          try {
    ////              updateBiomass(networkOrManipulationId, lNodeBiomass, startTimestep);
    //          } catch (Exception ex) {
    //              Log.println_e(ex.getMessage());
    //          }
    //      }

    //      // JTC Update changes to existing species parameters using web services (also
    //      // resubmits biomass, but couldn't find a way to do params w/o biomass
    //      if (!mUpdateParams.isEmpty()) {
    //          try {
    ////              increaseMultipleSpeciesType(
    ////                      mUpdateBiomass,
    ////                      masterSpeciesList,
    ////                      startTimestep,
    ////                      false,
    ////                      networkOrManipulationId
    ////              );
    //          } catch (Exception ex) {
    //              Log.println_e(ex.getMessage());
    //          }
    //      }

    //      run(startTimestep, runTimestep, networkOrManipulationId);

    // get new predicted biomass
    try {
      // JTC - changed variable from "mSpecies = " to "mUpdateBiomass = "
      // mUpdateBiomass = getBiomass(networkOrManipulationId, 0, startTimestep + runTimestep);
      if (!masterSpeciesList.isEmpty() || !mNewSpecies.isEmpty()) {
        mUpdateBiomass =
            submitManipRequest("ATN", nodeConfig, startTimestep + runTimestep, false, null);
      }
    } catch (Exception ex) {
      Log.println_e(ex.getMessage());
      return null;
    }
    //      getBiomassInfo(networkOrManipulationId);

    // JTC - add loop to update persistent player species biomass information
    SpeciesZoneType updS;
    for (SpeciesZoneType priorS : masterSpeciesList.values()) {
      System.out.println("priorS.nodeIndex " + priorS.nodeIndex);
      updS = mUpdateBiomass.get(priorS.nodeIndex);
      if (updS != null && updS.currentBiomass != 0) {
        masterSpeciesList
            .get(priorS.nodeIndex)
            .setCurrentBiomass(Math.ceil(updS.getCurrentBiomass()));
      }
      //          else {
      //              zoneNodes.removeNode(priorS.nodeIndex);
      //          }
    }

    Log.printf(
        "Total Time (Get Prediction): %.2f seconds",
        Math.round((System.currentTimeMillis() - milliseconds) / 10.0) / 100.0);

    return (HashMap) zoneNodes.getNodes();
  }
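
The node loop near the top of getPrediction either registers a new species or adds the incoming biomass to an existing entry, clamped at zero. Below is a stripped-down, self-contained sketch of that merge step, using plain integers in place of SpeciesZoneType; the class name and values are illustrative only, mirroring the inline example comment above.

  import java.util.HashMap;
  import java.util.Map;

  public class BiomassMergeSketch {
    public static void main(String[] args) {
      // Biomass to add per node id (values are illustrative only).
      Map<Integer, Integer> addSpeciesNodeList = new HashMap<>();
      addSpeciesNodeList.put(70, 2494);
      addSpeciesNodeList.put(5, 2000);

      // Stand-in for masterSpeciesList: node id -> current biomass.
      Map<Integer, Integer> masterBiomass = new HashMap<>();
      masterBiomass.put(5, 500);

      for (Map.Entry<Integer, Integer> entry : addSpeciesNodeList.entrySet()) {
        int nodeId = entry.getKey();
        int added = entry.getValue();
        if (!masterBiomass.containsKey(nodeId)) {
          masterBiomass.put(nodeId, added);  // new species: just record it
        } else {
          // Existing species: add the biomass, never letting it go below zero.
          masterBiomass.merge(nodeId, added, (current, add) -> Math.max(0, current + add));
        }
      }

      System.out.println(masterBiomass);  // e.g. {5=2500, 70=2494}
    }
  }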
  /**
   * Processes HTTP <code>GET</code> requests and writes a JSON summary of alterations per cancer study.
   *
   * @param request servlet request
   * @param response servlet response
   * @throws javax.servlet.ServletException if a servlet-specific error occurs
   * @throws java.io.IOException if an I/O error occurs
   */
  protected void doGet(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    XDebug xdebug = new XDebug();
    xdebug.startTimer();

    response.setContentType("application/json");
    PrintWriter writer = response.getWriter();

    try {
      List resultsList = new LinkedList();

      // Get the gene list
      String geneList = request.getParameter(QueryBuilder.GENE_LIST);
      if (request instanceof XssRequestWrapper) {
        geneList = ((XssRequestWrapper) request).getRawParameter(QueryBuilder.GENE_LIST);
      }
      String cancerStudyIdListString = request.getParameter(QueryBuilder.CANCER_STUDY_LIST);
      String[] cancerStudyIdList = cancerStudyIdListString.split(",");

      // Get the priority
      Integer dataTypePriority;
      try {
        dataTypePriority =
            Integer.parseInt(request.getParameter(QueryBuilder.DATA_PRIORITY).trim());
      } catch (NumberFormatException e) {
        dataTypePriority = 0;
      }

      //  Get all cancer studies.
      List<CancerStudy> cancerStudiesList = accessControl.getCancerStudies();
      HashMap<String, Boolean> studyMap = new HashMap<>();
      for (String studyId : cancerStudyIdList) {
        studyMap.put(studyId, Boolean.TRUE);
      }
      for (CancerStudy cancerStudy : cancerStudiesList) {
        String cancerStudyId = cancerStudy.getCancerStudyStableId();
        if (!studyMap.containsKey(cancerStudyId)) {
          continue;
        }
        if (cancerStudyId.equalsIgnoreCase("all")) continue;

        Map cancerMap = new LinkedHashMap();
        cancerMap.put("studyId", cancerStudyId);
        cancerMap.put("typeOfCancer", cancerStudy.getTypeOfCancerId());

        //  Get all Genetic Profiles Associated with this Cancer Study ID.
        ArrayList<GeneticProfile> geneticProfileList =
            GetGeneticProfiles.getGeneticProfiles(cancerStudyId);

        //  Get all Patient Lists Associated with this Cancer Study ID.
        ArrayList<SampleList> sampleSetList = GetSampleLists.getSampleLists(cancerStudyId);

        //  Get the default patient set
        AnnotatedSampleSets annotatedSampleSets =
            new AnnotatedSampleSets(sampleSetList, dataTypePriority);
        SampleList defaultSampleSet = annotatedSampleSets.getDefaultSampleList();
        if (defaultSampleSet == null) {
          continue;
        }

        List<String> sampleIds = defaultSampleSet.getSampleList();

        //  Get the default genomic profiles
        CategorizedGeneticProfileSet categorizedGeneticProfileSet =
            new CategorizedGeneticProfileSet(geneticProfileList);
        HashMap<String, GeneticProfile> defaultGeneticProfileSet = null;
        switch (dataTypePriority) {
          case 2:
            defaultGeneticProfileSet = categorizedGeneticProfileSet.getDefaultCopyNumberMap();
            break;
          case 1:
            defaultGeneticProfileSet = categorizedGeneticProfileSet.getDefaultMutationMap();
            break;
          case 0:
          default:
            defaultGeneticProfileSet =
                categorizedGeneticProfileSet.getDefaultMutationAndCopyNumberMap();
        }

        String mutationProfile = "", cnaProfile = "";
        for (GeneticProfile geneticProfile : defaultGeneticProfileSet.values()) {
          GeneticAlterationType geneticAlterationType = geneticProfile.getGeneticAlterationType();
          if (geneticAlterationType.equals(GeneticAlterationType.COPY_NUMBER_ALTERATION)) {
            cnaProfile = geneticProfile.getStableId();
          } else if (geneticAlterationType.equals(GeneticAlterationType.MUTATION_EXTENDED)) {
            mutationProfile = geneticProfile.getStableId();
          }
        }

        cancerMap.put("mutationProfile", mutationProfile);
        cancerMap.put("cnaProfile", cnaProfile);

        cancerMap.put("caseSetId", defaultSampleSet.getStableId());
        cancerMap.put("caseSetLength", sampleIds.size());

        ProfileDataSummary genomicData =
            getGenomicData(
                cancerStudyId,
                defaultGeneticProfileSet,
                defaultSampleSet,
                geneList,
                sampleSetList,
                request,
                response);

        ArrayList<GeneWithScore> geneFrequencyList = genomicData.getGeneFrequencyList();
        ArrayList<String> genes = new ArrayList<String>();
        for (GeneWithScore geneWithScore : geneFrequencyList) genes.add(geneWithScore.getGene());
        int noOfMutated = 0,
            noOfCnaUp = 0,
            noOfCnaDown = 0,
            noOfCnaLoss = 0,
            noOfCnaGain = 0,
            noOfOther = 0,
            noOfAll = 0;

        boolean skipStudy = defaultGeneticProfileSet.isEmpty();
        if (!skipStudy) {

          for (String sampleId : sampleIds) {
            if (sampleId == null) {
              continue;
            }
            if (!genomicData.isCaseAltered(sampleId)) continue;

            boolean isAnyMutated = false,
                isAnyCnaUp = false,
                isAnyCnaDown = false,
                isAnyCnaLoss = false,
                isAnyCnaGain = false;

            for (String gene : genes) {
              isAnyMutated |= genomicData.isGeneMutated(gene, sampleId);
              GeneticTypeLevel cnaLevel = genomicData.getCNALevel(gene, sampleId);
              boolean isCnaUp = cnaLevel != null && cnaLevel.equals(GeneticTypeLevel.Amplified);
              isAnyCnaUp |= isCnaUp;
              boolean isCnaDown =
                  cnaLevel != null && cnaLevel.equals(GeneticTypeLevel.HomozygouslyDeleted);
              isAnyCnaDown |= isCnaDown;
              boolean isCnaLoss =
                  cnaLevel != null && cnaLevel.equals(GeneticTypeLevel.HemizygouslyDeleted);
              isAnyCnaLoss |= isCnaLoss;
              boolean isCnaGain = cnaLevel != null && cnaLevel.equals(GeneticTypeLevel.Gained);
              isAnyCnaGain |= isCnaGain;
            }

            boolean isAnyCnaChanged = isAnyCnaUp || isAnyCnaDown;
            if (isAnyMutated && !isAnyCnaChanged) noOfMutated++;
            else if (isAnyMutated && isAnyCnaChanged) noOfOther++;
            else if (isAnyCnaUp) noOfCnaUp++;
            else if (isAnyCnaDown) noOfCnaDown++;
            else if (isAnyCnaGain) noOfCnaGain++;
            else if (isAnyCnaLoss) noOfCnaLoss++;

            noOfAll++;
          }
        }

        Map alterations = new LinkedHashMap();
        cancerMap.put("alterations", alterations);
        alterations.put("all", noOfAll);
        alterations.put("mutation", noOfMutated);
        alterations.put("cnaUp", noOfCnaUp);
        alterations.put("cnaDown", noOfCnaDown);
        alterations.put("cnaLoss", noOfCnaLoss);
        alterations.put("cnaGain", noOfCnaGain);
        alterations.put("other", noOfOther);
        cancerMap.put("genes", genes);
        cancerMap.put("skipped", skipStudy);

        resultsList.add(cancerMap);
      }

      JSONValue.writeJSONString(resultsList, writer);
    } catch (DaoException e) {
      throw new ServletException(e);
    } catch (ProtocolException e) {
      throw new ServletException(e);
    } finally {
      writer.close();
    }
  }
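
For orientation, the servlet emits a JSON array with one object per requested study, using the keys populated above. The shape (values shown as placeholders, not real data) is roughly:

  [
    {
      "studyId": "<cancer study stable id>",
      "typeOfCancer": "<type of cancer id>",
      "mutationProfile": "<mutation profile stable id, or empty>",
      "cnaProfile": "<copy-number profile stable id, or empty>",
      "caseSetId": "<default sample set stable id>",
      "caseSetLength": <number of sample ids>,
      "alterations": {
        "all": <altered cases>, "mutation": <...>, "cnaUp": <...>, "cnaDown": <...>,
        "cnaLoss": <...>, "cnaGain": <...>, "other": <...>
      },
      "genes": ["<gene symbol>", ...],
      "skipped": <true if the study has no default genetic profiles>
    },
    ...
  ]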