Example 1
  /** {@inheritDoc} */
  public ActionForward execute(
      @SuppressWarnings("unused") ActionMapping mapping,
      @SuppressWarnings("unused") ActionForm form,
      HttpServletRequest request,
      HttpServletResponse response)
      throws Exception {
    String type = request.getParameter("type");
    if (type == null) {
      type = "webapp";
    }

    if ("webapp".equals(type)) {
      response.getOutputStream().print("OK");
    } else if ("query".equals(type)) {
      final InterMineAPI im = SessionMethods.getInterMineAPI(request.getSession());
      ObjectStore os = im.getObjectStore();
      Query q = new Query();
      QueryClass c = new QueryClass(InterMineObject.class);
      q.addFrom(c);
      q.addToSelect(c);
      // Add a unique value to the select to avoid caching the query
      QueryValue token = new QueryValue(System.currentTimeMillis());
      q.addToSelect(token);
      Results r = os.execute(q, 1, false, false, false);
      if (r.get(0) != null) {
        response.getOutputStream().print("OK");
      } else {
        response.getOutputStream().print("NO RESULTS");
      }
    }
    return null;
  }
  /**
   * Populate the SequenceFeature.locatedFeatures() collection for: Gene, Transcript, Exon and CDS
   *
   * @throws Exception if anything goes wrong
   */
  public void populateCollection() throws Exception {
    Map<String, SOTerm> soTerms = populateSOTermMap(osw);
    Query q = getAllParents();
    Results res = osw.getObjectStore().execute(q);
    Iterator<Object> resIter = res.iterator();
    osw.beginTransaction();
    int parentCount = 0;
    int childCount = 0;

    while (resIter.hasNext()) {
      ResultsRow<InterMineObject> rr = (ResultsRow<InterMineObject>) resIter.next();
      InterMineObject parent = rr.get(0);
      SOTerm soTerm = (SOTerm) rr.get(1);
      InterMineObject o = PostProcessUtil.cloneInterMineObject(parent);
      Set<InterMineObject> newCollection = getChildFeatures(soTerms, soTerm, o);
      if (newCollection != null && !newCollection.isEmpty()) {
        o.setFieldValue(TARGET_COLLECTION, newCollection);
        osw.store(o);
        parentCount++;
        childCount += newCollection.size();
      }
    }
    osw.commitTransaction();
    LOG.info("Stored " + childCount + " child features for " + parentCount + " parent features");
  }
Example 3
 /**
  * Calls ObjectStore.releaseGoFaster() if this object wraps a Results object from an
  * ObjectStoreInterMineImpl.
  *
  * @throws ObjectStoreException if ObjectStoreInterMineImpl.releaseGoFaster() throws the exception
  */
 public synchronized void releaseGoFaster() throws ObjectStoreException {
   goingFaster--;
   if (goingFaster == 0) {
     ObjectStore os = osResults.getObjectStore();
     if (os instanceof ObjectStoreInterMineImpl) {
       ((ObjectStoreInterMineImpl) os).releaseGoFaster(osResults.getQuery());
     }
   }
 }
Example 4
 /**
  * Calls ObjectStore.goFaster() if this object wraps a Results object from an
  * ObjectStoreInterMineImpl.
  *
  * @throws ObjectStoreException if ObjectStoreInterMineImpl.goFaster() throws the exception
  */
 public synchronized void goFaster() throws ObjectStoreException {
   goingFaster++;
   if (goingFaster == 1) {
     osResults = changeResultBatchSize(osResults, BIG_BATCH_SIZE);
     ObjectStore os = osResults.getObjectStore();
     if (os instanceof ObjectStoreInterMineImpl) {
       ((ObjectStoreInterMineImpl) os).goFaster(osResults.getQuery());
     }
   }
 }
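Because goFaster() and releaseGoFaster() share the goingFaster reference count, every goFaster() call must be balanced by a releaseGoFaster(), otherwise the underlying ObjectStoreInterMineImpl is never told to release the query. A minimal usage sketch follows; the ResultsWrapper interface is a hypothetical stand-in for whichever class declares the two methods above, and only behaviour shown in this listing is assumed.

import org.intermine.objectstore.ObjectStoreException;

/** Illustrative pairing of goFaster() with releaseGoFaster() around a long read. */
public class GoFasterUsageSketch {

  /** Hypothetical stand-in for the class that declares the two methods above. */
  interface ResultsWrapper extends Iterable<Object> {
    void goFaster() throws ObjectStoreException;

    void releaseGoFaster() throws ObjectStoreException;
  }

  public static void scanAll(ResultsWrapper results) throws ObjectStoreException {
    results.goFaster(); // first call switches the wrapped query into goFaster mode
    try {
      for (Object row : results) {
        // ... process each row here ...
      }
    } finally {
      // always balance the call, even on failure, so the query is released
      results.releaseGoFaster();
    }
  }
}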
  @Test
  public void test() throws Exception {
    Model m = osw.getModel();

    Long start = System.currentTimeMillis();

    PathQuery pq = new PathQuery(m);

    pq.addViews("Types.name", "Types.intType", "Types.doubleType");
    pq.addConstraint(Constraints.eq("Types.name", "histo*"));

    Query q = MainHelper.makeSummaryQuery(pq, "Types.intType", new HashMap(), new HashMap(), null);

    Results res = osw.execute(q, 100000, true, false, false);

    Long sum = 0L;
    Map<Integer, Long> actual = new TreeMap<Integer, Long>();

    for (Object o : res) {
      // System.out.println("ROW:" + o);
      List row = (List) o;
      Integer bucket = (Integer) row.get(5);
      Integer min = (Integer) row.get(0);
      Integer max = (Integer) row.get(1);
      Integer buckets = (Integer) row.get(4);
      Integer width = (max - min) / (buckets - 1);
      Integer group = min + ((bucket - 1) * width);
      Long count = ((BigDecimal) row.get(6)).longValue();
      actual.put(group, count);
      sum += count;
    }
    Long postExecution = System.currentTimeMillis();
    showHistogram("Actual", actual);
    System.out.printf(
        "MIN:        %d\nMAX:        %d\nAVG:        %.03f\nSTD-DEV:    %.03f\n",
        ((List) res.get(0)).subList(0, 4).toArray());
    System.out.println("TOTAL THINGS: " + sum);
    System.out.printf(
        "Query composition and execution took %.4f seconds",
        Double.valueOf(postExecution - start) / 1000);

    res = osw.execute(getCheckQuery());
    Long countFromOSQ = null;
    for (Object o : res) {
      List row = (List) o;
      countFromOSQ = (Long) row.get(0);
    }
    // int scale = generator.nextInt(MAX_SCALE - MIN_SCALE) + MIN_SCALE;

    assertEquals("Sum of buckets and total count agrees", sum, countFromOSQ);
    assertEquals("Sum of buckets agrees with what we inserted", made, sum.intValue());
  }
  public void testFailFast() throws Exception {
    Query q1 = new Query();
    QueryClass qc1 = new QueryClass(Employee.class);
    q1.addFrom(qc1);
    q1.addToSelect(qc1);

    Results r1 = writer.execute(q1);
    writer.store(data.get("EmployeeA1"));
    try {
      r1.iterator().hasNext();
      fail("Expected: ConcurrentModificationException");
    } catch (ConcurrentModificationException e) {
      // expected: the store() above invalidated the open Results iterator
    }
  }
 public void testTranslation() throws Exception {
   ObjectStore os2 =
       new ObjectStoreTranslatingImpl(
           Model.getInstanceByName("testmodel"),
           ObjectStoreFactory.getObjectStore("os.unittest"),
           new CompanyTranslator());
   Query q = new Query();
   QueryClass qc = new QueryClass(InterMineObject.class);
   q.addToSelect(qc);
   q.addFrom(qc);
   Results res = os2.execute(q);
   assertEquals(2, res.size());
   assertEquals("CompanyA", ((Bank) ((ResultsRow) res.get(0)).get(0)).getName());
   assertEquals("CompanyB", ((Bank) ((ResultsRow) res.get(1)).get(0)).getName());
 }
Example 8
 /**
  * Create a new WebResults object.
  *
  * @param im the InterMine API
  * @param pathQuery used to get the paths of the columns
  * @param results the underlying Results object
  * @param pathToQueryNode the mapping between Paths (from the PathQuery's view) and the
  *     QueryNodes in the results object
  * @param pathToBagQueryResult a Map containing results from LOOKUP operations
  */
 public WebResults(
     InterMineAPI im,
     PathQuery pathQuery,
     Results results,
     Map<String, QuerySelectable> pathToQueryNode,
     Map<String, BagQueryResult> pathToBagQueryResult) {
   this.im = im;
   this.osResults = results;
   this.flatResults = new ResultsFlatOuterJoinsImpl(((List) osResults), osResults.getQuery());
   model = im.getModel();
   this.columnPaths = new ArrayList<Path>();
   try {
     for (String pathString : pathQuery.getView()) {
       this.columnPaths.add(pathQuery.makePath(pathString));
     }
   } catch (PathException e) {
     throw new RuntimeException("Error creating WebResults because PathQuery is invalid", e);
   }
   classKeys = im.getClassKeys();
   this.pathToQueryNode = new HashMap<String, QuerySelectable>();
   if (pathToQueryNode != null) {
     this.pathToQueryNode.putAll(pathToQueryNode);
   }
   this.pathToBagQueryResult = new HashMap<String, BagQueryResult>();
   if (pathToBagQueryResult != null) {
     this.pathToBagQueryResult.putAll(pathToBagQueryResult);
   }
   this.pathQuery = pathQuery;
   pathToIndex = getPathToIndex();
   redirector = im.getLinkRedirector();
   addColumnsInternal(columnPaths);
 }
  /** Test that transactions do actually commit and that isInTransaction() works. */
  public void testCommitTransactions() throws Exception {
    Address address1 = new Address();
    address1.setAddress("Address 1");
    Address address2 = new Address();
    address2.setAddress("Address 2");

    Query q = new Query();
    QueryClass qcAddress = new QueryClass(Address.class);
    QueryField qf = new QueryField(qcAddress, "address");
    ConstraintSet cs1 = new ConstraintSet(ConstraintOp.OR);
    cs1.addConstraint(new SimpleConstraint(qf, ConstraintOp.MATCHES, new QueryValue("Address%")));
    q.addToSelect(qcAddress);
    q.addFrom(qcAddress);
    q.addToOrderBy(qf);
    q.setConstraint(cs1);

    try {
      writer.beginTransaction();
      assertTrue(writer.isInTransaction());

      writer.store(address1);
      writer.store(address2);

      // Should be nothing in OS until we commit
      Results res = realOs.execute(q);
      assertEquals(0, res.size());

      // However, they should be in the WRITER.
      // TODO: These lines now fail, because we do not allow querying on writers with uncommitted
      // data. The writer should relax this restriction.
      res = writer.execute(q);
      assertEquals(2, res.size());

      writer.commitTransaction();
      assertFalse(writer.isInTransaction());
      res = realOs.execute(q);
      assertEquals(2, res.size());
      assertEquals(address1, (Address) ((ResultsRow) res.get(0)).get(0));
      assertEquals(address2, (Address) ((ResultsRow) res.get(1)).get(0));

    } finally {
      writer.delete(address1);
      writer.delete(address2);
    }
  }
Example 10
  /**
   * Create an ObjectStore query from a PathQuery and execute it, returning results in a format
   * appropriate for displaying a web table.
   *
   * @param pathQuery the query to execute
   * @param pathToBagQueryResult will be populated with results from bag queries used in any LOOKUP
   *     constraints
   * @return results in a format appropriate for display in a web page table
   * @throws ObjectStoreException if there is a problem running the query
   */
  public WebResults execute(PathQuery pathQuery, Map<String, BagQueryResult> pathToBagQueryResult)
      throws ObjectStoreException {
    Map<String, QuerySelectable> pathToQueryNode = new HashMap<String, QuerySelectable>();

    Query q = makeQuery(pathQuery, pathToBagQueryResult, pathToQueryNode);

    Results results = os.execute(q, Constants.BATCH_SIZE, true, true, false);

    Query realQ = results.getQuery();
    if (realQ == q) {
      queryToPathToQueryNode.put(q, pathToQueryNode);
    } else {
      pathToQueryNode = queryToPathToQueryNode.get(realQ);
    }

    WebResults webResults =
        new WebResults(im, pathQuery, results, pathToQueryNode, pathToBagQueryResult);

    return webResults;
  }
  /**
   * @param os object store
   * @return a map from SO term name to SOTerm
   * @throws ObjectStoreException if something goes wrong
   */
  protected Map<String, SOTerm> populateSOTermMap(ObjectStore os) throws ObjectStoreException {
    Map<String, SOTerm> soTerms = new HashMap<String, SOTerm>();
    Query q = new Query();
    q.setDistinct(false);

    QueryClass qcSOTerm = new QueryClass(SOTerm.class);
    q.addToSelect(qcSOTerm);
    q.addFrom(qcSOTerm);
    q.addToOrderBy(qcSOTerm);

    Results res = os.execute(q);

    Iterator it = res.iterator();

    while (it.hasNext()) {
      ResultsRow<InterMineObject> rr = (ResultsRow<InterMineObject>) it.next();
      SOTerm soTerm = (SOTerm) rr.get(0);
      soTerms.put(soTerm.getName(), soTerm);
    }
    return soTerms;
  }
Example 12
 /** {@inheritDoc} */
 @Override
 public Results execute(
     Query q, int batchSize, boolean optimise, boolean explain, boolean prefetch) {
   Results retval = new Results(q, this, getSequence(getComponentsForQuery(q)));
   if (batchSize != 0) {
     retval.setBatchSize(batchSize);
   }
   if (!optimise) {
     retval.setNoOptimise();
   }
   if (!explain) {
     retval.setNoExplain();
   }
   if (!prefetch) {
     retval.setNoPrefetch();
   }
   retval.setImmutable();
   return retval;
 }
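For reference, the flags map onto the setNoOptimise/setNoExplain/setNoPrefetch calls above, and a batch size of 0 keeps the Results default. Below is a small sketch of the two call styles used elsewhere in this listing; the class and method names are illustrative only, and the package names are assumptions based on the InterMine codebase.

import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.query.Query;
import org.intermine.objectstore.query.Results;

/** Illustrative only: the flag combinations used by callers in this listing. */
public class ExecuteFlagsSketch {

  /** Single-row batches, no optimise/explain/prefetch, as in the heartbeat action above. */
  public static Results minimalCheck(ObjectStore os, Query q) throws ObjectStoreException {
    return os.execute(q, 1, false, false, false);
  }

  /** 500-row batches with optimise, explain and prefetch on, as in createUtrRefs further below. */
  public static Results bulkRead(ObjectStore os, Query q) throws ObjectStoreException {
    return os.execute(q, 500, true, true, true);
  }
}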
Example 13
  private static void runSpanValidationQuery(InterMineAPI im) {

    // A map from organism short name to its ChromosomeInfo entries,
    // e.g. <D.Melanogaster, (D.Melanogaster, X, 5000)...>
    chrInfoMap = new HashMap<String, List<ChromosomeInfo>>();

    try {
      Query q = new Query();

      QueryClass qcOrg = new QueryClass(Organism.class);
      QueryClass qcChr = new QueryClass(Chromosome.class);

      // Result columns
      QueryField qfOrgName = new QueryField(qcOrg, "shortName");
      QueryField qfChrPID = new QueryField(qcChr, "primaryIdentifier");
      QueryField qfChrLength = new QueryField(qcChr, "length");

      // As in SQL SELECT ?,?,?
      q.addToSelect(qfOrgName);
      q.addToSelect(qfChrPID);
      q.addToSelect(qfChrLength);

      // As in SQL FROM ?,?
      q.addFrom(qcChr);
      q.addFrom(qcOrg);

      // As in SQL WHERE ?
      QueryObjectReference organism = new QueryObjectReference(qcChr, "organism");
      ContainsConstraint ccOrg = new ContainsConstraint(organism, ConstraintOp.CONTAINS, qcOrg);
      q.setConstraint(ccOrg);

      Results results = im.getObjectStore().execute(q);

      // A list of all the ChromosomeInfo entries (organism, chrPID, length)
      List<ChromosomeInfo> chrInfoList = new ArrayList<ChromosomeInfo>();
      // A set of all the organism names
      Set<String> orgSet = new HashSet<String>();

      // Handle results
      for (Iterator<?> iter = results.iterator(); iter.hasNext(); ) {
        ResultsRow<?> row = (ResultsRow<?>) iter.next();

        String org = (String) row.get(0);
        String chrPID = (String) row.get(1);
        Integer chrLength = (Integer) row.get(2);

        // Add orgName to the set to filter out duplicates
        orgSet.add(org);

        if (chrLength != null) {
          ChromosomeInfo chrInfo = new ChromosomeInfo();
          chrInfo.setOrgName(org);
          chrInfo.setChrPID(chrPID);
          chrInfo.setChrLength(chrLength);

          // Add the ChromosomeInfo to the list
          chrInfoList.add(chrInfo);
        }
      }

      // Iterate over orgSet and chrInfoList to build chrInfoMap, keyed by organism name, with
      // the value being the list of ChromosomeInfo entries that share that organism name
      for (String o : orgSet) {

        // A list of ChromosomeInfo entries for this organism
        List<ChromosomeInfo> chrInfoSubList = new ArrayList<ChromosomeInfo>();

        for (ChromosomeInfo chrInfo : chrInfoList) {
          if (o.equals(chrInfo.getOrgName())) {
            chrInfoSubList.add(chrInfo);
            chrInfoMap.put(o, chrInfoSubList);
          }
        }
      }

    } catch (Exception e) {
      e.printStackTrace();
    }
  }
Example 14
  /** The method to run all the queries. */
  @SuppressWarnings("rawtypes")
  private void queryExecutor() {

    // spanOverlapFullResultMap stores the full result data in the session
    @SuppressWarnings("unchecked")
    Map<String, Map<GenomicRegion, List<SpanQueryResultRow>>> spanOverlapFullResultMap =
        (Map<String, Map<GenomicRegion, List<SpanQueryResultRow>>>)
            request.getSession().getAttribute("spanOverlapFullResultMap");

    if (spanOverlapFullResultMap == null) {
      spanOverlapFullResultMap =
          new HashMap<String, Map<GenomicRegion, List<SpanQueryResultRow>>>();
    }

    Map<GenomicRegion, List<SpanQueryResultRow>> spanOverlapResultDisplayMap =
        Collections.synchronizedMap(new LinkedHashMap<GenomicRegion, List<SpanQueryResultRow>>());

    // GBrowse track
    @SuppressWarnings("unchecked")
    Map<String, Map<GenomicRegion, LinkedHashMap<String, LinkedHashSet<GBrowseTrackInfo>>>>
        gbrowseFullTrackMap =
            (HashMap<
                    String,
                    Map<GenomicRegion, LinkedHashMap<String, LinkedHashSet<GBrowseTrackInfo>>>>)
                request.getSession().getAttribute("gbrowseFullTrackMap");

    if (gbrowseFullTrackMap == null) {
      gbrowseFullTrackMap =
          new HashMap<
              String, Map<GenomicRegion, LinkedHashMap<String, LinkedHashSet<GBrowseTrackInfo>>>>();
    }

    Map<GenomicRegion, LinkedHashMap<String, LinkedHashSet<GBrowseTrackInfo>>> gbrowseTrackMap =
        Collections.synchronizedMap(
            new LinkedHashMap<
                GenomicRegion, LinkedHashMap<String, LinkedHashSet<GBrowseTrackInfo>>>());

    if (!spanOverlapFullResultMap.containsKey(spanUUIDString)) {
      spanOverlapFullResultMap.put(spanUUIDString, spanOverlapResultDisplayMap);
      request.getSession().setAttribute("spanOverlapFullResultMap", spanOverlapFullResultMap);

      gbrowseFullTrackMap.put(spanUUIDString, gbrowseTrackMap);
      request.getSession().setAttribute("gbrowseFullTrackMap", gbrowseFullTrackMap);

      try {
        Query q;
        for (GenomicRegion aSpan : spanList) {
          q = new Query();
          q.setDistinct(true);

          String chrPID = aSpan.getChr();
          Integer start = aSpan.getStart();
          Integer end = aSpan.getEnd();

          /*
          >>>>> TEST CODE <<<<<
          LOG.info("OrgName: " + orgName);
          LOG.info("chrPID: " + chrPID);
          LOG.info("start: " + start);
          LOG.info("end: " + end);
          LOG.info("FeatureTypes: " + ftKeys);
          LOG.info("Submissions: " + subKeys);
          >>>>> TEST CODE <<<<<
          */

          // DB tables
          QueryClass qcOrg = new QueryClass(Organism.class);
          QueryClass qcChr = new QueryClass(Chromosome.class);
          QueryClass qcFeature = new QueryClass(SequenceFeature.class);
          QueryClass qcLoc = new QueryClass(Location.class);
          QueryClass qcSubmission = new QueryClass(Submission.class);

          QueryField qfOrgName = new QueryField(qcOrg, "shortName");
          QueryField qfChrPID = new QueryField(qcChr, "primaryIdentifier");
          QueryField qfFeaturePID = new QueryField(qcFeature, "primaryIdentifier");
          QueryField qfFeatureId = new QueryField(qcFeature, "id");
          QueryField qfFeatureClass = new QueryField(qcFeature, "class");
          QueryField qfSubmissionTitle = new QueryField(qcSubmission, "title");
          QueryField qfSubmissionDCCid = new QueryField(qcSubmission, "DCCid");
          QueryField qfChr = new QueryField(qcChr, "primaryIdentifier");
          QueryField qfLocStart = new QueryField(qcLoc, "start");
          QueryField qfLocEnd = new QueryField(qcLoc, "end");

          q.addToSelect(qfFeatureId);
          q.addToSelect(qfFeaturePID);
          q.addToSelect(qfFeatureClass);
          q.addToSelect(qfChr);
          q.addToSelect(qfLocStart);
          q.addToSelect(qfLocEnd);
          q.addToSelect(qfSubmissionDCCid);
          q.addToSelect(qfSubmissionTitle);

          q.addFrom(qcChr);
          q.addFrom(qcOrg);
          q.addFrom(qcFeature);
          q.addFrom(qcLoc);
          q.addFrom(qcSubmission);

          q.addToOrderBy(qfLocStart, "ascending");

          ConstraintSet constraints = new ConstraintSet(ConstraintOp.AND);

          q.setConstraint(constraints);

          // SequenceFeature.organism = Organism
          QueryObjectReference organism = new QueryObjectReference(qcFeature, "organism");
          ContainsConstraint ccOrg = new ContainsConstraint(organism, ConstraintOp.CONTAINS, qcOrg);
          constraints.addConstraint(ccOrg);

          // Organism.name = orgName
          SimpleConstraint scOrg =
              new SimpleConstraint(qfOrgName, ConstraintOp.EQUALS, new QueryValue(orgName));
          constraints.addConstraint(scOrg);

          // Location.feature = SequenceFeature
          QueryObjectReference locSubject = new QueryObjectReference(qcLoc, "feature");
          ContainsConstraint ccLocSubject =
              new ContainsConstraint(locSubject, ConstraintOp.CONTAINS, qcFeature);
          constraints.addConstraint(ccLocSubject);

          // Location.locatedOn = Chromosome
          QueryObjectReference locObject = new QueryObjectReference(qcLoc, "locatedOn");
          ContainsConstraint ccLocObject =
              new ContainsConstraint(locObject, ConstraintOp.CONTAINS, qcChr);
          constraints.addConstraint(ccLocObject);

          // Chromosome.primaryIdentifier = chrPID
          SimpleConstraint scChr =
              new SimpleConstraint(qfChrPID, ConstraintOp.EQUALS, new QueryValue(chrPID));
          constraints.addConstraint(scChr);

          // SequenceFeature.submissions = Submission
          QueryCollectionReference submission =
              new QueryCollectionReference(qcFeature, "submissions");
          ContainsConstraint ccSubmission =
              new ContainsConstraint(submission, ConstraintOp.CONTAINS, qcSubmission);
          constraints.addConstraint(ccSubmission);

          // SequenceFeature.class in a list
          constraints.addConstraint(new BagConstraint(qfFeatureClass, ConstraintOp.IN, ftKeys));
          // Submission.DCCid in a list
          constraints.addConstraint(new BagConstraint(qfSubmissionDCCid, ConstraintOp.IN, subKeys));

          OverlapRange overlapInput =
              new OverlapRange(new QueryValue(start), new QueryValue(end), locObject);
          OverlapRange overlapFeature =
              new OverlapRange(
                  new QueryField(qcLoc, "start"), new QueryField(qcLoc, "end"), locObject);
          OverlapConstraint oc =
              new OverlapConstraint(overlapInput, ConstraintOp.OVERLAPS, overlapFeature);
          constraints.addConstraint(oc);

          Results results = im.getObjectStore().execute(q);

          /*
          >>>>> TEST CODE <<<<<
          LOG.info("Query: " + q.toString());
          LOG.info("Result Size: " + results.size());
          LOG.info("Result >>>>> " + results);
          >>>>> TEST CODE <<<<<
          */

          List<SpanQueryResultRow> spanResults = new ArrayList<SpanQueryResultRow>();
          if (results == null || results.isEmpty()) {
            spanOverlapResultDisplayMap.put(aSpan, null);
            gbrowseTrackMap.put(aSpan, null);
          } else {
            for (Iterator<?> iter = results.iterator(); iter.hasNext(); ) {
              ResultsRow<?> row = (ResultsRow<?>) iter.next();

              SpanQueryResultRow aRow = new SpanQueryResultRow();
              aRow.setFeatureId((Integer) row.get(0));
              aRow.setFeaturePID((String) row.get(1));
              aRow.setFeatureClass(((Class) row.get(2)).getSimpleName());
              aRow.setChr((String) row.get(3));
              aRow.setStart((Integer) row.get(4));
              aRow.setEnd((Integer) row.get(5));
              aRow.setSubDCCid((String) row.get(6));
              aRow.setSubTitle((String) row.get(7));

              spanResults.add(aRow);
            }
            spanOverlapResultDisplayMap.put(aSpan, spanResults);
            gbrowseTrackMap.put(aSpan, getSubGbrowseTrack(spanResults)); // Gbrowse
          }
        }

      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  }
Example 15
  @Override
  protected void execute() throws Exception {
    String pathString = request.getParameter("path");

    Map<String, Object> attributes = getHeaderAttributes();
    output.setHeaderAttributes(attributes);

    if (isEmpty(pathString)) {
      throw new BadRequestException("No path provided");
    }
    attributes.put("path", pathString);
    kvPairs.put("path", pathString);

    String typeConstraintStr = request.getParameter("typeConstraints");
    Map<String, String> typeMap = new HashMap<String, String>();
    if (!isEmpty(typeConstraintStr)) {
      logger.debug(typeConstraintStr);
      JSONObject typeJO = new JSONObject(typeConstraintStr);
      Iterator<String> it = (Iterator<String>) typeJO.keys();
      while (it.hasNext()) {
        String name = it.next();
        String subType = typeJO.getString(name);
        typeMap.put(name, subType);
      }
    }

    Model model = im.getModel();

    Path path;
    try {
      if (typeMap.isEmpty()) {
        path = new Path(model, pathString);
      } else {
        path = new Path(model, pathString, typeMap);
      }
    } catch (PathException e) {
      throw new BadRequestException("Bad path given: " + pathString, e);
    }

    Query q = new Query();

    attributes.put("class", path.getLastClassDescriptor().getUnqualifiedName());
    attributes.put("field", path.getLastElement());
    String type = ((AttributeDescriptor) path.getEndFieldDescriptor()).getType();
    String[] parts = split(type, '.');
    reverse(parts);
    attributes.put("type", parts[0]);

    QueryClass qc = new QueryClass(path.getPrefix().getEndType());
    q.addFrom(qc);

    QueryField qf1 = new QueryField(qc, path.getLastElement());
    q.addToSelect(qf1);

    QueryFunction qf = new QueryFunction();
    q.addToSelect(qf);
    q.addToGroupBy(qf1);

    int count = im.getObjectStore().count(q, ObjectStore.SEQUENCE_IGNORE);

    if (formatIsCount()) {
      output.addResultItem(Arrays.asList(String.valueOf(count)));
    } else {
      attributes.put("count", count);

      Results results = im.getObjectStore().execute(q, DEFAULT_BATCH_SIZE, true, true, false);
      Iterator<Object> iter = results.iterator();

      while (iter.hasNext()) {
        @SuppressWarnings("rawtypes")
        List row = (List) iter.next();
        Map<String, Object> jsonMap = new HashMap<String, Object>();
        jsonMap.put("value", row.get(0));
        jsonMap.put("count", row.get(1));
        JSONObject jo = new JSONObject(jsonMap);
        List<String> forOutput = new ArrayList<String>();
        forOutput.add(jo.toString());
        if (iter.hasNext()) {
          // Standard hack to ensure commas
          forOutput.add("");
        }
        output.addResultItem(forOutput);
      }
    }
  }
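The handler above uses the standard "count per distinct value" pattern: the attribute goes on both the select and group-by lists, and the no-argument QueryFunction supplies the count, so each result row carries the value at index 0 and its count at index 1. A condensed sketch of just that pattern follows; the class name is illustrative, and the Employee test class and package names are assumptions based on the tests elsewhere in this listing.

import org.intermine.model.testmodel.Employee;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.query.Query;
import org.intermine.objectstore.query.QueryClass;
import org.intermine.objectstore.query.QueryField;
import org.intermine.objectstore.query.QueryFunction;
import org.intermine.objectstore.query.Results;

/** Illustrative only: counts Employees grouped by the "age" attribute. */
public class GroupedCountSketch {

  public static Results countByAge(ObjectStore os) throws ObjectStoreException {
    Query q = new Query();
    QueryClass qcEmp = new QueryClass(Employee.class);
    q.addFrom(qcEmp);

    QueryField qfAge = new QueryField(qcEmp, "age");
    q.addToSelect(qfAge); // the grouped value (row index 0)
    q.addToSelect(new QueryFunction()); // count aggregate (row index 1), as in the handler above
    q.addToGroupBy(qfAge);

    // each result row is a List: get(0) is the age, get(1) is how many Employees share it
    return os.execute(q);
  }
}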
  public void testLoad() throws Exception {

    TSVFileReaderTask tsvTask = new TSVFileReaderTask();
    tsvTask.setIgnoreDuplicates(true);
    tsvTask.setIntegrationWriterAlias("integration.unittestmulti");
    tsvTask.setSourceName("testsource");

    cleanObjects(tsvTask.getDirectDataLoader().getIntegrationWriter());

    File tempFile = File.createTempFile("TSVFileReaderTaskTest", "tmp");
    FileWriter fw = new FileWriter(tempFile);
    InputStream is = getClass().getClassLoader().getResourceAsStream("TSVFileReaderTaskTest.tsv");
    BufferedReader br = new BufferedReader(new InputStreamReader(is));

    String line = null;
    while ((line = br.readLine()) != null) {
      fw.write(line + "\n");
    }

    fw.close();

    FileSet fileSet = new FileSet();

    fileSet.setFile(tempFile);

    tsvTask.addFileSet(fileSet);

    InputStream confInputStream =
        getClass().getClassLoader().getResourceAsStream("TSVFileReaderTaskTest.properties");
    DelimitedFileConfiguration dfc = new DelimitedFileConfiguration(model, confInputStream);

    tsvTask.executeInternal(dfc, tempFile);

    // Check the results to see if we have some data...
    ObjectStore os = tsvTask.getDirectDataLoader().getIntegrationWriter().getObjectStore();

    Query q = new Query();
    QueryClass empQueryClass = new QueryClass(Employee.class);
    QueryField qf0 = new QueryField(empQueryClass, "age");
    QueryField qf1 = new QueryField(empQueryClass, "name");
    QueryField qf2 = new QueryField(empQueryClass, "fullTime");

    q.addToSelect(qf0);
    q.addToSelect(qf1);
    q.addToSelect(qf2);
    q.addFrom(empQueryClass);

    q.addToOrderBy(qf1);

    Results r = os.execute(q);

    if (r.size() != 3) {
      for (List<Object> rr : (List<List<Object>>) ((List) r)) {
        System.err.print("row: ");
        for (Object obj : rr) {
          System.err.print("{" + obj + "} ");
        }
        System.err.println();
      }
    }

    assertEquals(3, r.size());

    List expectedRow0 = Arrays.asList(new Object[] {new Integer(10), "EmployeeA1", Boolean.FALSE});
    assertEquals(expectedRow0, r.get(0));

    List expectedRow1 = Arrays.asList(new Object[] {new Integer(20), "EmployeeA2", Boolean.TRUE});
    assertEquals(expectedRow1, r.get(1));

    List expectedRow2 = Arrays.asList(new Object[] {new Integer(0), "EmployeeA3", Boolean.FALSE});
    assertEquals(expectedRow2, r.get(2));
  }
Example 17
 /** {@inheritDoc} */
 public boolean isSingleBatch() {
   return osResults.isSingleBatch();
 }
Example 18
 /**
  * Return the ResultsInfo object from the underlying Results object.
  *
  * @return the ResultsInfo object
  * @throws ObjectStoreException if there is an exception while getting the info
  */
 public ResultsInfo getInfo() throws ObjectStoreException {
   return osResults.getInfo();
 }
Example 19
  /**
   * Read the UTRs collection of MRNA then set the fivePrimeUTR and threePrimeUTR fields with the
   * corresponding UTRs.
   *
   * @throws Exception if anything goes wrong
   */
  public void createUtrRefs() throws Exception {
    long startTime = System.currentTimeMillis();
    Query q = new Query();
    q.setDistinct(false);

    QueryClass qcMRNA = new QueryClass(model.getClassDescriptorByName("MRNA").getType());
    q.addFrom(qcMRNA);
    q.addToSelect(qcMRNA);
    q.addToOrderBy(qcMRNA);

    QueryClass qcUTR = new QueryClass(model.getClassDescriptorByName("UTR").getType());
    q.addFrom(qcUTR);
    q.addToSelect(qcUTR);
    q.addToOrderBy(qcUTR);

    QueryCollectionReference mrnaUtrsRef = new QueryCollectionReference(qcMRNA, "UTRs");
    ContainsConstraint mrnaUtrsConstraint =
        new ContainsConstraint(mrnaUtrsRef, ConstraintOp.CONTAINS, qcUTR);

    QueryObjectReference fivePrimeRef = new QueryObjectReference(qcMRNA, "fivePrimeUTR");
    ContainsConstraint fivePrimeNullConstraint =
        new ContainsConstraint(fivePrimeRef, ConstraintOp.IS_NULL);
    QueryObjectReference threePrimeRef = new QueryObjectReference(qcMRNA, "threePrimeUTR");
    ContainsConstraint threePrimeNullConstraint =
        new ContainsConstraint(threePrimeRef, ConstraintOp.IS_NULL);

    ConstraintSet cs = new ConstraintSet(ConstraintOp.AND);
    cs.addConstraint(mrnaUtrsConstraint);
    cs.addConstraint(fivePrimeNullConstraint);
    cs.addConstraint(threePrimeNullConstraint);

    q.setConstraint(cs);

    ObjectStore os = osw.getObjectStore();

    ((ObjectStoreInterMineImpl) os).precompute(q, Constants.PRECOMPUTE_CATEGORY);
    Results res = os.execute(q, 500, true, true, true);

    int count = 0;
    InterMineObject lastMRNA = null;

    InterMineObject fivePrimeUTR = null;
    InterMineObject threePrimeUTR = null;

    osw.beginTransaction();

    Class<? extends FastPathObject> fivePrimeUTRCls =
        model.getClassDescriptorByName("FivePrimeUTR").getType();

    Iterator<?> resIter = res.iterator();
    while (resIter.hasNext()) {
      ResultsRow<?> rr = (ResultsRow<?>) resIter.next();
      InterMineObject mrna = (InterMineObject) rr.get(0);
      InterMineObject utr = (InterMineObject) rr.get(1);

      if (lastMRNA != null && !mrna.getId().equals(lastMRNA.getId())) {
        // clone so we don't change the ObjectStore cache
        InterMineObject tempMRNA = PostProcessUtil.cloneInterMineObject(lastMRNA);
        if (fivePrimeUTR != null) {
          tempMRNA.setFieldValue("fivePrimeUTR", fivePrimeUTR);
          fivePrimeUTR = null;
        }
        if (threePrimeUTR != null) {
          tempMRNA.setFieldValue("threePrimeUTR", threePrimeUTR);
          threePrimeUTR = null;
        }
        osw.store(tempMRNA);
        count++;
      }

      if (DynamicUtil.isInstance(utr, fivePrimeUTRCls)) {
        fivePrimeUTR = utr;
      } else {
        threePrimeUTR = utr;
      }

      lastMRNA = mrna;
    }

    if (lastMRNA != null) {
      // clone so we don't change the ObjectStore cache
      InterMineObject tempMRNA = PostProcessUtil.cloneInterMineObject(lastMRNA);
      tempMRNA.setFieldValue("fivePrimeUTR", fivePrimeUTR);
      tempMRNA.setFieldValue("threePrimeUTR", threePrimeUTR);
      osw.store(tempMRNA);
      count++;
    }
    LOG.info(
        "Stored MRNA "
            + count
            + " times ("
            + count * 2
            + " fields set)"
            + " - took "
            + (System.currentTimeMillis() - startTime)
            + " ms.");
    osw.commitTransaction();

    // Now ANALYSE tables relating to the class that has been altered - rows may have been
    // added to indirection tables
    if (osw instanceof ObjectStoreWriterInterMineImpl) {
      ClassDescriptor cld = model.getClassDescriptorByName("MRNA");
      DatabaseUtil.analyse(((ObjectStoreWriterInterMineImpl) osw).getDatabase(), cld, false);
    }
  }
Example 20
 /**
  * Make a copy of a Results object, but with a different batch size.
  *
  * @param oldResults the original Results object
  * @param newBatchSize the new batch size
  * @return a new Results object with a new batch size
  */
 private Results changeResultBatchSize(Results oldResults, int newBatchSize) {
   Results newResults =
       oldResults.getObjectStore().execute(oldResults.getQuery(), newBatchSize, true, true, true);
   return newResults;
 }
  /** Test that transactions can be aborted */
  public void testAbortTransactions() throws Exception {
    Address address1 = new Address();
    address1.setAddress("Address 3");
    Address address2 = new Address();
    address2.setAddress("Address 4");

    Query q = new Query();
    QueryClass qcAddress = new QueryClass(Address.class);
    QueryField qf = new QueryField(qcAddress, "address");
    ConstraintSet cs1 = new ConstraintSet(ConstraintOp.OR);
    cs1.addConstraint(new SimpleConstraint(qf, ConstraintOp.MATCHES, new QueryValue("Address%")));
    q.addToSelect(qcAddress);
    q.addFrom(qcAddress);
    q.addToOrderBy(qf);
    q.setConstraint(cs1);

    Results res = writer.execute(q);
    assertEquals(res.toString(), 0, res.size());

    res = realOs.execute(q);
    assertEquals(res.toString(), 0, res.size());

    writer.beginTransaction();
    assertTrue(writer.isInTransaction());

    writer.store(address1);
    writer.store(address2);

    // TODO: These lines now fail, because we do not allow querying on writers with uncommitted
    // data. The writer should relax this restriction.
    res = writer.execute(q);
    assertEquals(2, res.size());

    res = realOs.execute(q);
    assertEquals(res.toString(), 0, res.size());

    writer.abortTransaction();
    assertFalse(writer.isInTransaction());

    // Should be nothing there because the transaction was aborted

    res = writer.execute(q);
    assertEquals(res.toString(), 0, res.size());

    res = realOs.execute(q);
    assertEquals(res.toString(), 0, res.size());
  }
Example 22
 /**
  * Returns the ObjectStore's maximum allowable offset.
  *
  * @return an int
  */
 public int getMaxRetrievableIndex() {
   return osResults.getObjectStore().getMaxOffset();
 }