@Override
public List<QueryResult> getAllByRegionList(List<Region> regionList, QueryOptions options) {
    // db.regulatory_region.find({"chunkIds": {$in:["1_200", "1_300"]}, "start": 601156})
    List<Object> featureType = options.getList("featureType", null);
    List<Object> featureClass = options.getList("featureClass", null);
    // options = addExcludeReturnFields("chunkIds", options);

    List<DBObject> queries = new ArrayList<>();
    for (Region region : regionList) {
        int firstChunkId = getChunkId(region.getStart(), regulatoryRegionChunkSize);
        int lastChunkId = getChunkId(region.getEnd(), regulatoryRegionChunkSize);
        BasicDBList chunksId = new BasicDBList();
        for (int j = firstChunkId; j <= lastChunkId; j++) {
            String chunkId = region.getChromosome() + "_" + j + "_" + regulatoryRegionChunkSize / 1000 + "k";
            chunksId.add(chunkId);
        }
        // logger.info(chunksId.toString());

        // A fresh builder is created for every region; QueryBuilder.start() is a static factory method
        QueryBuilder builder = QueryBuilder.start("_chunkIds").in(chunksId)
                .and("start").lessThanEquals(region.getEnd())
                .and("end").greaterThanEquals(region.getStart());
        if (featureType != null && featureType.size() > 0) {
            BasicDBList featureTypeDBList = new BasicDBList();
            featureTypeDBList.addAll(featureType);
            builder = builder.and("featureType").in(featureTypeDBList);
        }
        if (featureClass != null && featureClass.size() > 0) {
            BasicDBList featureClassDBList = new BasicDBList();
            featureClassDBList.addAll(featureClass);
            builder = builder.and("featureClass").in(featureClassDBList);
        }
        queries.add(builder.get());
    }

    // System.out.println(">>" + regionList);
    return executeQueryList2(regionList, queries, options);
}
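/*
 * Illustrative sketch (not part of the adaptor): the region/position methods in this class each
 * rebuild the same "<chromosome>_<chunk>_<chunkSize/1000>k" identifiers by hand. A helper like the
 * one below could centralise that logic. It assumes getChunkId() is a plain integer division
 * (position / chunkSize), which the "1_300"-style ids in the comments suggest; the method name is
 * hypothetical.
 */
private static List<String> buildChunkIds(String chromosome, int start, int end, int chunkSize) {
    List<String> chunkIds = new ArrayList<>();
    int firstChunk = start / chunkSize;   // assumed equivalent to getChunkId(start, chunkSize)
    int lastChunk = end / chunkSize;
    for (int chunk = firstChunk; chunk <= lastChunk; chunk++) {
        chunkIds.add(chromosome + "_" + chunk + "_" + chunkSize / 1000 + "k");
    }
    return chunkIds;
}
// Example: buildChunkIds("1", 600000, 604000, 2000) -> ["1_300_2k", "1_301_2k", "1_302_2k"]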
@Override
public QueryResult next(String chromosome, int position, QueryOptions options) {
    String featureType = options.getString("featureType", null);
    String featureClass = options.getString("featureClass", null);

    BasicDBList chunksId = new BasicDBList();
    String chunkId = chromosome + "_" + getChunkId(position, regulatoryRegionChunkSize) + "_"
            + regulatoryRegionChunkSize / 1000 + "k";
    chunksId.add(chunkId);

    // TODO: Add query to find the next item when it falls in the next chunk
    // db.regulatory_region.find({"chromosome": "19", "start": {"$gt": 62005},
    //     "featureType": "TF_binding_site_motif"}).sort({start: 1}).limit(1)
    QueryBuilder builder;
    if (options.getString("strand") == null
            || options.getString("strand").equals("1")
            || options.getString("strand").equals("+")) {
        // Forward strand: first element starting after the given position
        // db.core.find({chromosome: "1", start: {$gt: 1000000}}).sort({start: 1}).limit(1)
        builder = QueryBuilder.start("_chunkIds").in(chunksId)
                .and("chromosome").is(chromosome)
                .and("start").greaterThan(position);
        options.put("sort", new BasicDBObject("start", 1));
        options.put("limit", 1);
    } else {
        // Reverse strand: last element ending before the given position
        builder = QueryBuilder.start("_chunkIds").in(chunksId)
                .and("chromosome").is(chromosome)
                .and("end").lessThan(position);
        options.put("sort", new BasicDBObject("end", -1));
        options.put("limit", 1);
    }

    if (featureType != null) {
        builder.and("featureType").is(featureType);
    }
    if (featureClass != null) {
        builder.and("featureClass").is(featureClass);
    }

    logger.debug("{}", builder.get());
    return executeQuery("result", builder.get(), options);
}
@Override
public List<QueryResult> getAllByPositionList(List<Position> positionList, QueryOptions options) {
    // db.regulatory_region.find({"chunkIds": {$in:["1_200", "1_300"]}, "start": 601156})
    String featureType = options.getString("featureType", null);
    String featureClass = options.getString("featureClass", null);

    List<DBObject> queries = new ArrayList<>();
    for (Position position : positionList) {
        String chunkId = position.getChromosome() + "_"
                + getChunkId(position.getPosition(), regulatoryRegionChunkSize) + "_"
                + regulatoryRegionChunkSize / 1000 + "k";
        BasicDBList chunksId = new BasicDBList();
        chunksId.add(chunkId);

        QueryBuilder builder = QueryBuilder.start("_chunkIds").in(chunksId)
                .and("start").is(position.getPosition());
        if (featureType != null) {
            builder.and("featureType").is(featureType);
        }
        if (featureClass != null) {
            builder.and("featureClass").is(featureClass);
        }
        queries.add(builder.get());
    }
    logger.debug("Query: {}", queries);

    // options = addExcludeReturnFields("chunkIds", options);
    return executeQueryList2(positionList, queries, options);
}
public QueryResult<File> index(int fileId, int outDirId, String storageEngine, String sessionId, QueryOptions options)
        throws IOException, CatalogException, AnalysisExecutionException {

    if (options == null) {
        options = new QueryOptions();
    }
    final boolean execute = options.getBoolean(AnalysisJobExecuter.EXECUTE);
    final boolean simulate = options.getBoolean(AnalysisJobExecuter.SIMULATE);
    final boolean recordOutput = options.getBoolean(AnalysisJobExecuter.RECORD_OUTPUT);
    final long start = System.currentTimeMillis();

    /* Query catalog for user data */
    String userId = catalogManager.getUserIdBySessionId(sessionId);
    File file = catalogManager.getFile(fileId, sessionId).first();
    File outDir = catalogManager.getFile(outDirId, sessionId).first();
    int studyIdByOutDirId = catalogManager.getStudyIdByFileId(outDirId);
    Study study = catalogManager.getStudy(studyIdByOutDirId, sessionId).getResult().get(0);

    if (file.getType() != File.Type.FILE) {
        throw new CatalogException("Expected file type = " + File.Type.FILE + " instead of " + file.getType());
    }

    final String dbName;
    if (options.containsKey(DB_NAME)) {
        dbName = options.getString(DB_NAME);
    } else {
        if (study.getAttributes().containsKey(DB_NAME) && study.getAttributes().get(DB_NAME) != null) {
            dbName = study.getAttributes().get(DB_NAME).toString();
        } else {
            int projectId = catalogManager.getProjectIdByStudyId(study.getId());
            String alias = catalogManager.getProject(projectId, new QueryOptions("include", "alias"), sessionId)
                    .first().getAlias();
            dbName = Config.getAnalysisProperties().getProperty(OPENCGA_ANALYSIS_STORAGE_DATABASE_PREFIX, "opencga_")
                    + userId + "_" + alias;
        }
    }

    // TODO: Check if file can be indexed

    // ObjectMap to fill with modifications over the indexed file (like new attributes or jobId)
    ObjectMap indexFileModifyParams = new ObjectMap("attributes", new ObjectMap());

    /* Create temporal job outdir */
    final URI temporalOutDirUri;
    final String randomString = "I_" + StringUtils.randomString(10);
    if (simulate) {
        temporalOutDirUri = createSimulatedOutDirUri(randomString);
    } else {
        temporalOutDirUri = catalogManager.createJobOutDir(studyIdByOutDirId, randomString, sessionId);
    }

    List<Sample> sampleList;

    /* Create index file */
    final File index;
    if (options.containsKey(INDEX_FILE_ID)) {
        logger.debug("Using an existing indexedFile.");
        int indexFileId = options.getInt(INDEX_FILE_ID);
        index = catalogManager.getFile(indexFileId, sessionId).first();
        if (index.getType() != File.Type.INDEX) {
            throw new CatalogException("Expected {type: INDEX} in IndexedFile " + indexFileId);
        }
        if (index.getStatus() != File.Status.READY) {
            throw new CatalogException("Expected {status: READY} in IndexedFile " + indexFileId);
        }
        if (simulate) {
            index.setStatus(File.Status.INDEXING);
        } else {
            ObjectMap parameters = new ObjectMap("status", File.Status.INDEXING);
            catalogManager.modifyFile(index.getId(), parameters, sessionId);
        }

        /* Get file samples */
        sampleList = catalogManager
                .getAllSamples(study.getId(), new QueryOptions("id", index.getSampleIds()), sessionId)
                .getResult();
    } else {
        /* Get file samples */
        sampleList = getFileSamples(study, file, indexFileModifyParams, simulate, options, sessionId);

        String indexedFileDescription = "Indexation of " + file.getName() + " (" + fileId + ")";
        String indexedFileName = file.getName() + "." + storageEngine;
        String indexedFilePath = Paths.get(outDir.getPath(), indexedFileName).toString();
        if (simulate) {
            index = new File(-10, indexedFileName, File.Type.INDEX, file.getFormat(), file.getBioformat(),
                    indexedFilePath, userId, TimeUtils.getTime(), indexedFileDescription, File.Status.INDEXING,
                    -1, -1, null, -1, null, null, new HashMap<String, Object>());
        } else {
            index = catalogManager.createFile(studyIdByOutDirId, File.Type.INDEX, file.getFormat(),
                    file.getBioformat(), indexedFilePath, null, null, indexedFileDescription,
                    File.Status.INDEXING, 0, -1, null, -1, null, null, false, null, sessionId).first();
        }
    }

    /* Create commandLine */
    String commandLine = createCommandLine(study, file, index, sampleList, storageEngine, temporalOutDirUri,
            indexFileModifyParams, dbName, options);
    if (options.containsKey(PARAMETERS)) {
        List<String> extraParams = options.getAsStringList(PARAMETERS);
        for (String extraParam : extraParams) {
            commandLine += " " + extraParam;
        }
    }

    /* Create job */
    ObjectMap jobResourceManagerAttributes = new ObjectMap();
    jobResourceManagerAttributes.put(Job.TYPE, Job.Type.INDEX);
    jobResourceManagerAttributes.put(Job.INDEXED_FILE_ID, index.getId());

    String jobName = "index";
    String jobDescription = "Indexing file " + file.getName() + " (" + fileId + ")";
    final Job job = AnalysisJobExecuter.createJob(catalogManager, studyIdByOutDirId, jobName,
            OPENCGA_STORAGE_BIN_NAME, jobDescription, outDir, Collections.<Integer>emptyList(), sessionId,
            randomString, temporalOutDirUri, commandLine, execute, simulate, recordOutput,
            jobResourceManagerAttributes).first();

    if (simulate) {
        index.getAttributes().put("job", job);
        // index.getAttributes().putAll(indexFileModifyParams.getMap("attributes"));
        index.setSampleIds(indexFileModifyParams.getAsIntegerList("sampleIds"));
        // VariantSource variantSource = (VariantSource) index.getAttributes().get("variantSource");
        // variantSource.setSamplesPosition(new HashMap<String, Integer>());
        return new QueryResult<>("indexFile", (int) (System.currentTimeMillis() - start), 1, 1, "", "",
                Collections.singletonList(index));
    } else {
        /* Update IndexFile to add extra information (jobId, sampleIds, attributes, ...) */
        indexFileModifyParams.put("jobId", job.getId());
        Set<Integer> jobIds;
        try {
            jobIds = new HashSet<>(new ObjectMap(index.getAttributes()).getAsIntegerList("jobIds"));
        } catch (Exception ignore) {
            jobIds = new HashSet<>(1);
        }
        if (index.getJobId() > 0) {
            jobIds.add(index.getJobId());
        }
        jobIds.add(job.getId());
        indexFileModifyParams.getMap("attributes").put("jobIds", jobIds);

        catalogManager.modifyFile(index.getId(), indexFileModifyParams, sessionId).getResult();
        return new QueryResult<>("indexFile", (int) (System.currentTimeMillis() - start), 1, 1, "", "",
                catalogManager.getFile(index.getId(), sessionId).getResult());
    }
}
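/*
 * Hedged usage sketch for index() above. The constant names (AnalysisJobExecuter.SIMULATE, DB_NAME,
 * PARAMETERS) are the ones this class already references; the ids, storage engine, database name
 * and extra parameter below are placeholders, and this helper is illustrative rather than part of
 * the class.
 */
private QueryResult<File> indexSimulationSketch(int fileId, int outDirId, String sessionId)
        throws IOException, CatalogException, AnalysisExecutionException {
    QueryOptions indexOptions = new QueryOptions();
    indexOptions.put(AnalysisJobExecuter.SIMULATE, true);          // dry-run: builds the command line and a fake index File, queues no job
    indexOptions.put(DB_NAME, "opencga_user_alias");               // optional override of the study/project-derived database name
    indexOptions.put(PARAMETERS, Arrays.asList("--extra-param"));  // hypothetical extra argument appended to the command line
    return index(fileId, outDirId, "mongodb", sessionId, indexOptions);
}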
private List<Sample> getFileSamples(Study study, File file, ObjectMap indexFileModifyParams, boolean simulate,
                                    QueryOptions options, String sessionId) throws CatalogException {
    List<Sample> sampleList;
    QueryOptions queryOptions = new QueryOptions("include",
            Arrays.asList("projects.studies.samples.id", "projects.studies.samples.name"));

    if (file.getSampleIds() == null || file.getSampleIds().isEmpty()) {
        // Read samples from file
        List<String> sampleNames = null;
        switch (file.getBioformat()) {
            case VARIANT: {
                if (file.getAttributes().containsKey("variantSource")) {
                    Object variantSource = file.getAttributes().get("variantSource");
                    if (variantSource instanceof VariantSource) {
                        sampleNames = ((VariantSource) variantSource).getSamples();
                    } else if (variantSource instanceof Map) {
                        sampleNames = new ObjectMap((Map) variantSource).getAsStringList("samples");
                    } else {
                        logger.warn("Unexpected object type of variantSource ({}) in file attributes. Expected {} or {}",
                                variantSource.getClass(), VariantSource.class, Map.class);
                    }
                }
                if (sampleNames == null) {
                    VariantSource variantSource = readVariantSource(catalogManager, study, file);
                    indexFileModifyParams.get("attributes", ObjectMap.class).put("variantSource", variantSource);
                    sampleNames = variantSource.getSamples();
                }
            }
            break;
            default:
                return new LinkedList<>();
                // throw new CatalogException("Unknown how to get sample names from bioformat " + file.getBioformat());
        }

        // Find matching samples in catalog with the sampleName from the VariantSource.
        queryOptions.add("name", sampleNames);
        sampleList = catalogManager.getAllSamples(study.getId(), queryOptions, sessionId).getResult();

        // Check that all file samples exist in the catalog
        if (sampleList.size() != sampleNames.size()) {
            // Size does not match. Find the missing samples.
            Set<String> set = new HashSet<>(sampleNames);
            for (Sample sample : sampleList) {
                set.remove(sample.getName());
            }
            logger.warn("Missing samples: {}", set);

            if (options.getBoolean(CREATE_MISSING_SAMPLES, true)) {
                for (String sampleName : set) {
                    if (simulate) {
                        sampleList.add(new Sample(-1, sampleName, file.getName(), null, null));
                    } else {
                        sampleList.add(catalogManager.createSample(study.getId(), sampleName, file.getName(),
                                null, null, null, sessionId).first());
                    }
                }
            } else {
                throw new CatalogException("Can not find samples " + set + " in catalog");  // FIXME: Create missing samples??
            }
        }
    } else {
        // Get samples from file.sampleIds
        queryOptions.add("id", file.getSampleIds());
        sampleList = catalogManager.getAllSamples(study.getId(), queryOptions, sessionId).getResult();
    }

    List<Integer> sampleIdsList = new ArrayList<>(sampleList.size());
    for (Sample sample : sampleList) {
        sampleIdsList.add(sample.getId());
    }
    indexFileModifyParams.put("sampleIds", sampleIdsList);

    return sampleList;
}
/**
 * @param study                 Study where the file is located
 * @param file                  File to be indexed
 * @param indexFile             Generated index file
 * @param sampleList            Samples related to the file being indexed
 * @param storageEngine         StorageEngine to be used
 * @param outDirUri             Index outdir
 * @param indexFileModifyParams This map will be used to modify the indexFile
 * @param dbName                Name of the database where the file will be indexed
 * @param options               Other query options
 * @return CommandLine
 * @throws org.opencb.opencga.catalog.db.CatalogDBException
 * @throws CatalogIOManagerException
 */
private String createCommandLine(Study study, File file, File indexFile, List<Sample> sampleList,
                                 String storageEngine, URI outDirUri, final ObjectMap indexFileModifyParams,
                                 final String dbName, QueryOptions options) throws CatalogException {

    // Create command line
    String userId = file.getOwnerId();
    String name = file.getName();
    String commandLine;

    ObjectMap indexAttributes = indexFileModifyParams.get("attributes", ObjectMap.class);
    String opencgaStorageBin = Paths.get(Config.getOpenCGAHome(), "bin", OPENCGA_STORAGE_BIN_NAME).toString();

    if (file.getBioformat() == File.Bioformat.ALIGNMENT || name.endsWith(".bam") || name.endsWith(".sam")) {
        int chunkSize = 200;    // TODO: Read from properties.
        commandLine = new StringBuilder(opencgaStorageBin)
                .append(" --storage-engine ").append(storageEngine)
                .append(" index-alignments ")
                .append(" --file-id ").append(indexFile.getId())
                .append(" --database ").append(dbName)
                .append(" --input ").append(catalogManager.getFileUri(file))
                .append(" --calculate-coverage ").append(chunkSize)
                .append(" --mean-coverage ").append(chunkSize)
                .append(" --outdir ").append(outDirUri)
                // .append(" --credentials ")
                .toString();
        indexAttributes.put("chunkSize", chunkSize);

    } else if (name.endsWith(".fasta") || name.endsWith(".fasta.gz")) {
        throw new UnsupportedOperationException();

    } else if (file.getBioformat() == File.Bioformat.VARIANT || name.contains(".vcf") || name.contains(".vcf.gz")) {
        StringBuilder sampleIdsString = new StringBuilder();
        for (Sample sample : sampleList) {
            sampleIdsString.append(sample.getName()).append(":").append(sample.getId()).append(",");
        }

        StringBuilder sb = new StringBuilder(opencgaStorageBin)
                .append(" --storage-engine ").append(storageEngine)
                .append(" index-variants ")
                .append(" --file-id ").append(indexFile.getId())
                .append(" --study-name '").append(study.getName()).append("'")
                .append(" --study-id ").append(study.getId())
                // .append(" --study-type ").append(study.getType())
                .append(" --database ").append(dbName)
                .append(" --input ").append(catalogManager.getFileUri(file))
                .append(" --outdir ").append(outDirUri)
                .append(" --include-genotypes ")
                .append(" --compress-genotypes ")
                .append(" --include-stats ")
                // .append(" --sample-ids ").append(sampleIdsString)
                // .append(" --credentials ")
                ;
        if (options.getBoolean(VariantStorageManager.ANNOTATE, true)) {
            sb.append(" --annotate ");
        }
        if (options.getBoolean(VariantStorageManager.INCLUDE_SRC, false)) {
            sb.append(" --include-src ");
        }
        commandLine = sb.toString();

    } else {
        return null;
    }

    indexAttributes.put(INDEXED_FILE, file.getId());
    indexAttributes.put(DB_NAME, dbName);
    indexAttributes.put(STORAGE_ENGINE, storageEngine);

    return commandLine;
}
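/*
 * For reference, the variant branch of createCommandLine() above produces a command of roughly the
 * following shape. The binary name, paths, ids and database name are placeholders (only the flags
 * appear in the code above):
 *
 *   <OPENCGA_HOME>/bin/<OPENCGA_STORAGE_BIN_NAME> --storage-engine mongodb index-variants \
 *       --file-id 42 --study-name 'My study' --study-id 7 \
 *       --database opencga_user_alias --input file:///path/to/file.vcf.gz \
 *       --outdir file:///path/to/jobs/I_xxxxxxxxxx/ \
 *       --include-genotypes --compress-genotypes --include-stats --annotate
 */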
@Override
public QueryResult<Variant> getAllVariantsByRegionAndStudy(Region region, String sourceId, QueryOptions options) {
    Long start, end, dbstart, dbend;
    start = System.currentTimeMillis();
    QueryResult<Variant> queryResult = new QueryResult<>(
            String.format("%s:%d-%d", region.getChromosome(), region.getStart(), region.getEnd()));
    List<Variant> results = new LinkedList<>();

    boolean includeSamples;
    boolean includeStats;
    boolean includeEffects;
    if (!options.containsKey("samples") && !options.containsKey("stats") && !options.containsKey("effects")) {
        includeSamples = true;
        includeStats = true;
        includeEffects = true;
    } else {
        includeSamples = options.containsKey("samples") && options.getBoolean("samples");
        includeStats = options.containsKey("stats") && options.getBoolean("stats");
        includeEffects = options.containsKey("effects") && options.getBoolean("effects");
    }

    try {
        String startRow = buildRowkey(region.getChromosome(), Long.toString(region.getStart()));
        String stopRow = buildRowkey(region.getChromosome(), Long.toString(region.getEnd()));
        HTable table = new HTable(admin.getConfiguration(), tableName);
        dbstart = System.currentTimeMillis();
        Scan regionScan = new Scan(startRow.getBytes(), stopRow.getBytes());
        ResultScanner scanres = table.getScanner(regionScan);
        dbend = System.currentTimeMillis();
        queryResult.setDbTime(dbend - dbstart);

        // Iterate over results and, optionally, their samples and statistics
        for (Result result : scanres) {
            String[] rowkeyParts = new String(result.getRow(), CHARSET_UTF_8).split("_");
            String chromosome = rowkeyParts[0].replaceFirst("^0+(?!$)", "");
            int position = Integer.parseInt(rowkeyParts[1]);

            // Get basic result fields from Protocol Buffers message
            NavigableMap<byte[], byte[]> infoMap = result.getFamilyMap("i".getBytes());
            byte[] byteInfo = infoMap.get((sourceId + "_data").getBytes());
            VariantFieldsProtos.VariantInfo protoInfo = VariantFieldsProtos.VariantInfo.parseFrom(byteInfo);
            String reference = protoInfo.getReference();
            String alternate = StringUtils.join(protoInfo.getAlternateList(), ",");
            String format = StringUtils.join(protoInfo.getFormatList(), ":");
            Variant variant = new Variant(chromosome, position, position, reference, alternate);

            // Set samples if requested
            if (includeSamples) {
                NavigableMap<byte[], byte[]> sampleMap = result.getFamilyMap("d".getBytes());
                Map<String, Map<String, String>> resultSampleMap = new HashMap<>();

                for (byte[] s : sampleMap.keySet()) {
                    String sampleName = (new String(s, CHARSET_UTF_8)).replaceAll(sourceId + "_", "");
                    VariantFieldsProtos.VariantSample sample =
                            VariantFieldsProtos.VariantSample.parseFrom(sampleMap.get(s));
                    String sample1 = sample.getSample();
                    String[] values = sample1.split(":");
                    String[] fields = format.split(":");
                    Map<String, String> singleSampleMap = new HashMap<>();
                    for (int i = 0; i < fields.length; i++) {
                        singleSampleMap.put(fields[i], values[i]);
                    }
                    // TODO
                    // variant.addSampleData(sampleName, singleSampleMap);
                }
            }

            // Set stats if requested
            if (includeStats) {
                byte[] byteStats = infoMap.get((sourceId + "_stats").getBytes());
                VariantFieldsProtos.VariantStats protoStats = VariantFieldsProtos.VariantStats.parseFrom(byteStats);
                VariantStats variantStats = new VariantStats(chromosome, position, reference, alternate,
                        protoStats.getMaf(), protoStats.getMgf(),
                        protoStats.getMafAllele(), protoStats.getMgfGenotype(),
                        protoStats.getMissingAlleles(), protoStats.getMissingGenotypes(),
                        protoStats.getMendelianErrors(), protoStats.getIsIndel(),
                        protoStats.getCasesPercentDominant(), protoStats.getControlsPercentDominant(),
                        protoStats.getCasesPercentRecessive(), protoStats.getControlsPercentRecessive());
                variant.setStats(variantStats);
            }

            // Set effects if requested
            if (includeEffects) {
                QueryResult<VariantEffect> queryEffects = getEffectsByVariant(variant, options);
                variant.setEffect(queryEffects.getResult());
            }

            results.add(variant);
        }
    } catch (IOException e) {
        System.err.println(e.getClass().getName() + ": " + e.getMessage());
    }

    queryResult.setResult(results);
    queryResult.setNumResults(results.size());
    end = System.currentTimeMillis();
    queryResult.setTime(end - start);
    return queryResult;
}
public QueryResult getSimpleVariantsByRegion(Region region, String sourceId, QueryOptions options) {
    Long start, end, dbstart, dbend;
    start = System.currentTimeMillis();
    boolean includeStats;
    boolean includeEffects;
    if (!options.containsKey("stats") && !options.containsKey("effects")) {
        includeStats = true;
        includeEffects = true;
    } else {
        includeStats = options.containsKey("stats") && options.getBoolean("stats");
        includeEffects = options.containsKey("effects") && options.getBoolean("effects");
    }

    QueryResult<Variant> queryResult = new QueryResult<>(
            String.format("%s:%d-%d", region.getChromosome(), region.getStart(), region.getEnd()));
    List<Variant> results = new ArrayList<>();
    String startRow = buildRowkey(region.getChromosome(), Long.toString(region.getStart()));
    String stopRow = buildRowkey(region.getChromosome(), Long.toString(region.getEnd()));
    BasicDBObject query = new BasicDBObject("position", new BasicDBObject("$gte", startRow).append("$lte", stopRow))
            .append("sources.sourceId", sourceId);
    DBCollection collection = db.getCollection("variants");
    dbstart = System.currentTimeMillis();
    DBCursor variantInStudies = collection.find(query);
    dbend = System.currentTimeMillis();
    queryResult.setDbTime(dbend - dbstart);

    for (DBObject result : variantInStudies) {
        String[] rowkeyParts = result.get("position").toString().split("_");
        String chromosome = rowkeyParts[0].replaceFirst("^0+(?!$)", "");
        int position = Integer.parseInt(rowkeyParts[1]);
        BasicDBList studies = (BasicDBList) result.get("sources");
        BasicDBObject st = (BasicDBObject) studies.get(0);
        String ref = (String) st.get("ref");
        String alt = StringUtils.join((ArrayList<String>) st.get("alt"), ",");

        // TODO Needs rework
        Variant variant = new Variant(chromosome, position, position, ref, alt);

        // Set stats information
        if (includeStats) {
            VariantStats stats = new VariantStats();
            BasicDBObject mongoStats = (BasicDBObject) st.get("stats");
            stats.setMaf((float) (double) mongoStats.get("maf"));
            stats.setMafAllele((String) mongoStats.get("alleleMaf"));
            stats.setMissingGenotypes((int) mongoStats.get("missing"));
            List<Genotype> genotypeCount = new ArrayList<>();
            for (BasicDBObject s : (List<BasicDBObject>) mongoStats.get("genotypeCount")) {
                for (Map.Entry<String, Object> entry : s.entrySet()) {
                    Genotype genotype = new Genotype(entry.getKey());
                    genotype.setCount((Integer) entry.getValue());
                    genotypeCount.add(genotype);
                }
            }
            stats.setGenotypes(genotypeCount);
            variant.setStats(stats);
        }

        // TODO Set consequence type names
        if (includeEffects) {
            BasicDBList mongoEffects = (BasicDBList) st.get("effects");
            if (mongoEffects != null) {
                for (Object e : mongoEffects) {
                    String effectObo = e.toString();
                    VariantEffect effect = new VariantEffect();
                    effect.setConsequenceTypeObo(effectObo);
                    variant.addEffect(effect);
                }
            }
        }

        results.add(variant);
    }

    queryResult.setResult(results);
    queryResult.setNumResults(results.size());
    end = System.currentTimeMillis();
    queryResult.setTime(end - start);
    return queryResult;
}
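/*
 * Illustrative sketch (an assumption, not the actual buildRowkey() implementation used above): both
 * region queries split row keys on "_" and strip leading zeros from the chromosome, which suggests
 * keys of the form <zero-padded chromosome>_<zero-padded position> so that lexicographic order
 * matches genomic order for HBase scans and Mongo $gte/$lte range queries. A minimal version
 * consistent with that parsing, restricted to numeric chromosomes for simplicity, could be:
 */
private static String buildRowkeySketch(String chromosome, String position) {
    // Pad to fixed widths so plain string comparison follows chromosome/position order
    String paddedChromosome = String.format("%02d", Integer.parseInt(chromosome));
    String paddedPosition = String.format("%012d", Long.parseLong(position));
    return paddedChromosome + "_" + paddedPosition;
}
// Example: buildRowkeySketch("1", "62005") -> "01_000000062005"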
@GET
@Path("/{fileId}/fetch")
@Produces("application/json")
public Response fetch(@PathParam("fileId") @DefaultValue("") String fileId,
                      @QueryParam("backend") String backend,
                      @QueryParam("dbName") String dbName,
                      @QueryParam("bioformat") @DefaultValue("") String bioformat,
                      @QueryParam("region") String region,
                      @QueryParam("path") @DefaultValue("") String path,
                      @QueryParam("view_as_pairs") @DefaultValue("false") boolean view_as_pairs,
                      @QueryParam("include_coverage") @DefaultValue("true") boolean include_coverage,
                      @QueryParam("process_differences") @DefaultValue("true") boolean process_differences,
                      @QueryParam("histogram") @DefaultValue("false") boolean histogram,
                      @QueryParam("interval") @DefaultValue("2000") int interval) {
    try {
        switch (bioformat) {
            case "vcf":
                break;
            case "bam":
                AlignmentStorageManager sm = StorageManagerFactory.getAlignmentStorageManager(backend);
                ObjectMap params = new ObjectMap();
                AlignmentDBAdaptor dbAdaptor = sm.getDBAdaptor(dbName, params);

                QueryOptions options = new QueryOptions();
                if (path != null && !path.isEmpty()) {
                    String rootDir = OpenCGAStorageService.getInstance().getProperties()
                            .getProperty("OPENCGA.STORAGE.ROOTDIR",
                                    "/home/cafetero/opencga/catalog/users/jcoll/projects/1/1/");
                    options.put(AlignmentDBAdaptor.QO_BAM_PATH,
                            Paths.get(rootDir, path.replace(":", "/")).toString());
                }
                options.put(AlignmentDBAdaptor.QO_FILE_ID, fileId);
                options.put(AlignmentDBAdaptor.QO_VIEW_AS_PAIRS, view_as_pairs);
                options.put(AlignmentDBAdaptor.QO_INCLUDE_COVERAGE, include_coverage);
                options.put(AlignmentDBAdaptor.QO_PROCESS_DIFFERENCES, process_differences);
                options.put(AlignmentDBAdaptor.QO_INTERVAL_SIZE, interval);
                options.put(AlignmentDBAdaptor.QO_HISTOGRAM, histogram);
                // options.put(AlignmentDBAdaptor.QO_COVERAGE_CHUNK_SIZE, chunkSize);

                QueryResult queryResult;
                if (histogram) {
                    queryResult = dbAdaptor.getAllIntervalFrequencies(new Region(region), options);
                } else {
                    queryResult = dbAdaptor.getAllAlignmentsByRegion(Arrays.asList(new Region(region)), options);
                }
                return createOkResponse(queryResult);
            default:
                return createErrorResponse("Unknown bioformat " + bioformat);
        }
    } catch (Exception e) {
        e.printStackTrace();
        return createErrorResponse(e.toString());
    }
    // Only reached by the "vcf" case, which is not implemented yet
    return createErrorResponse("Unimplemented!");
}
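/*
 * Example request for the fetch endpoint above. The host and the resource prefix before
 * "/{fileId}/fetch" are placeholders, since the class-level @Path is not shown in this fragment;
 * the query parameter names and defaults come from the method signature:
 *
 *   GET /<resource>/3/fetch?bioformat=bam&backend=mongodb&dbName=opencga_user_project
 *       &region=20:60000-65000&histogram=true&interval=2000
 *
 * With histogram=true the adaptor returns per-interval coverage frequencies; otherwise it returns
 * the alignments overlapping the region. bioformat=vcf currently falls through to "Unimplemented!".
 */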