@Override public List<Entity> annotateEntity(Entity entity) { HttpGet httpGet = new HttpGet(getServiceUri(entity)); Entity resultEntity = new MapEntity(); if (!annotatedInput.contains(entity.get(UNIPROT_ID))) { annotatedInput.add(entity.get(UNIPROT_ID)); try { HttpResponse response = httpClient.execute(httpGet); BufferedReader br = new BufferedReader( new InputStreamReader( (response.getEntity().getContent()), Charset.forName("UTF-8"))); String output; StringBuilder result = new StringBuilder(); while ((output = br.readLine()) != null) { result.append(output); } resultEntity = parseResult(entity, result.toString()); } catch (Exception e) { httpGet.abort(); // TODO: how to handle exceptions at this point throw new RuntimeException(e); } } return Collections.singletonList(resultEntity); }
/**
 * Creates an internal MOLGENIS id from a VCF entity by joining the stripped
 * CHROM, POS, REF and ALT values with underscores.
 *
 * @param vcfEntity the VCF record to derive the id from
 * @return the id in the form {@code chrom_pos_ref_alt}
 */
public static String createId(Entity vcfEntity) {
  String chrom = StringUtils.strip(vcfEntity.get(CHROM).toString());
  String pos = StringUtils.strip(vcfEntity.get(POS).toString());
  String ref = StringUtils.strip(vcfEntity.get(REF).toString());
  String alt = StringUtils.strip(vcfEntity.get(ALT).toString());
  return String.join("_", chrom, pos, ref, alt);
}
/**
 * Validates that the entity's values for each unique attribute do not collide with
 * values already registered in this validation run.
 *
 * <p>For ADD any previously seen value is a violation; for UPDATE it is only a
 * violation when the value belongs to a different entity id. Non-violating values
 * are recorded so later rows in the same run are checked against them.
 *
 * @param entity the entity whose unique-attribute values are checked
 * @param validationResource per-run state: unique attrs, seen values, current row
 * @param validationMode ADD or UPDATE — changes the collision rule as above
 */
private void validateEntityValueUniqueness(
    Entity entity, ValidationResource validationResource, ValidationMode validationMode) {
  validationResource
      .getUniqueAttrs()
      .forEach(
          uniqueAttr -> {
            Object attrValue = entity.get(uniqueAttr.getName());
            // Null values are not checked for uniqueness.
            if (attrValue != null) {
              // For xref attributes compare on the referenced entity's id value.
              if (uniqueAttr.getDataType() instanceof XrefField) {
                attrValue = ((Entity) attrValue).getIdValue();
              }
              // Map of value -> owning entity id, accumulated across this run.
              HugeMap<Object, Object> uniqueAttrValues =
                  validationResource.getUniqueAttrsValues().get(uniqueAttr.getName());
              Object existingEntityId = uniqueAttrValues.get(attrValue);
              if ((validationMode == ValidationMode.ADD && existingEntityId != null)
                  || (validationMode == ValidationMode.UPDATE
                      && existingEntityId != null
                      && !existingEntityId.equals(entity.getIdValue()))) {
                ConstraintViolation constraintViolation =
                    new ConstraintViolation(
                        format(
                            "Duplicate value '%s' for unique attribute '%s' from entity '%s'",
                            attrValue, uniqueAttr.getName(), getName()),
                        uniqueAttr,
                        Long.valueOf(validationResource.getRow()));
                validationResource.addViolation(constraintViolation);
              } else {
                // Register the value so subsequent rows collide against it.
                uniqueAttrValues.put(attrValue, entity.getIdValue());
              }
            }
          });
}
/**
 * Builds the ChEMBL web-service URI for the entity's UniProt id:
 * {@code EBI_CHEMBLWS_URL + <uniprot id> + ".json"}.
 */
private String getServiceUri(Entity entity) {
  return EBI_CHEMBLWS_URL + entity.get(UNIPROT_ID) + ".json";
}
/**
 * Returns the attribute value, serving fetched fields from the decorated partial
 * entity and lazily resolving all other fields through a full entity reference.
 */
@Override
public Object get(String attributeName) {
  return fetch.hasField(attributeName)
      ? decoratedEntity.get(attributeName)
      : entityManager.getReference(getEntityMetaData(), getIdValue()).get(attributeName);
}
/**
 * Updates a single attribute for a batch of entities.
 *
 * <p>Each request entity must contain exactly two values: the entity id and the new
 * attribute value. The attribute must exist and must not be read-only; every id must
 * resolve to an existing entity. All updates are applied in one dataService call.
 *
 * @param entityName The name of the entity to update
 * @param attributeName The name of the attribute to update
 * @param request EntityCollectionBatchRequestV2
 * @param response HttpServletResponse
 * @throws Exception
 */
@RequestMapping(value = "/{entityName}/{attributeName}", method = PUT)
@ResponseStatus(OK)
public synchronized void updateAttribute(
    @PathVariable("entityName") String entityName,
    @PathVariable("attributeName") String attributeName,
    @RequestBody @Valid EntityCollectionBatchRequestV2 request,
    HttpServletResponse response)
    throws Exception {
  final EntityMetaData meta = dataService.getEntityMetaData(entityName);
  if (meta == null) {
    throw createUnknownEntityException(entityName);
  }
  try {
    AttributeMetaData attr = meta.getAttribute(attributeName);
    if (attr == null) {
      throw createUnknownAttributeException(entityName, attributeName);
    }
    if (attr.isReadonly()) {
      throw createMolgenisDataAccessExceptionReadOnlyAttribute(entityName, attributeName);
    }
    // Only entities with exactly (id, value) pairs are accepted; anything else is
    // filtered out and detected by the size comparison below.
    final List<Entity> entities =
        request
            .getEntities()
            .stream()
            .filter(e -> e.size() == 2)
            .map(e -> this.restService.toEntity(meta, e))
            .collect(Collectors.toList());
    if (entities.size() != request.getEntities().size()) {
      throw createMolgenisDataExceptionIdentifierAndValue();
    }
    final List<Entity> updatedEntities = new ArrayList<Entity>();
    int count = 0;
    for (Entity entity : entities) {
      // count is the batch position, used for error reporting by checkForEntityId.
      String id = checkForEntityId(entity, count);
      Entity originalEntity = dataService.findOne(entityName, id);
      if (originalEntity == null) {
        throw createUnknownEntityExceptionNotValidId(id);
      }
      Object value = this.restService.toEntityValue(attr, entity.get(attributeName));
      originalEntity.set(attributeName, value);
      updatedEntities.add(originalEntity);
      count++;
    }
    // update all entities
    this.dataService.update(entityName, updatedEntities.stream());
    response.setStatus(HttpServletResponse.SC_OK);
  } catch (Exception e) {
    // NOTE(review): 204 No Content is a success status; setting it before rethrowing
    // looks wrong (the exception resolver likely overwrites it) — confirm intent.
    response.setStatus(HttpServletResponse.SC_NO_CONTENT);
    throw e;
  }
}
/**
 * Validates that read-only attribute values of the entity are unchanged compared to
 * the stored entity, adding a constraint violation for every modified one.
 *
 * <p>Xref values are compared by referenced id; mref values by the list of referenced
 * ids. A null on either side is treated as "no change to report" (matching the final
 * null-guarded comparison).
 *
 * @param entity the incoming (updated) entity
 * @param validationResource collects violations and tracks the current row
 */
@SuppressWarnings("unchecked")
private void validateEntityValueReadOnly(Entity entity, ValidationResource validationResource) {
  Entity entityToUpdate = findOne(entity.getIdValue());
  validationResource
      .getReadonlyAttrs()
      .forEach(
          readonlyAttr -> {
            Object value = entity.get(readonlyAttr.getName());
            Object existingValue = entityToUpdate.get(readonlyAttr.getName());

            if (readonlyAttr.getDataType() instanceof XrefField) {
              if (value != null) {
                value = ((Entity) value).getIdValue();
              }
              if (existingValue != null) {
                existingValue = ((Entity) existingValue).getIdValue();
              }
            } else if (readonlyAttr.getDataType() instanceof MrefField) {
              // FIX: the original iterated both sides unconditionally and threw a
              // NullPointerException whenever either mref value was absent.
              if (value != null) {
                List<Object> entityIds = new ArrayList<>();
                ((Iterable<Entity>) value)
                    .forEach(mrefEntity -> entityIds.add(mrefEntity.getIdValue()));
                value = entityIds;
              }
              if (existingValue != null) {
                List<Object> existingEntityIds = new ArrayList<>();
                ((Iterable<Entity>) existingValue)
                    .forEach(mrefEntity -> existingEntityIds.add(mrefEntity.getIdValue()));
                existingValue = existingEntityIds;
              }
            }

            if (value != null && existingValue != null && !value.equals(existingValue)) {
              validationResource.addViolation(
                  new ConstraintViolation(
                      format(
                          "The attribute '%s' of entity '%s' can not be changed it is readonly.",
                          readonlyAttr.getName(), getName()),
                      Long.valueOf(validationResource.getRow())));
            }
          });
}
/**
 * Parses the JSON service response into a result entity.
 *
 * <p>The response's {@code "target"} object becomes the result's attribute map,
 * augmented with the query's UniProt id. An empty JSON string, or a response without
 * a {@code "target"} element, yields an empty MapEntity.
 *
 * @param entity the entity that was annotated (supplies the UniProt id)
 * @param json the raw JSON response body
 * @return the parsed result entity, never null
 * @throws IOException if the JSON cannot be parsed
 */
@SuppressWarnings("unchecked")
private Entity parseResult(Entity entity, String json) throws IOException {
  Entity result = new MapEntity();
  if (!"".equals(json)) {
    Map<String, Object> rootMap = jsonStringToMap(json);
    Map<String, Object> resultMap = (Map<String, Object>) rootMap.get("target");
    // FIX: guard against a response without a "target" element; the original
    // dereferenced resultMap unconditionally and threw a NullPointerException.
    if (resultMap != null) {
      resultMap.put(UNIPROT_ID, entity.get(UNIPROT_ID));
      result = new MapEntity(resultMap);
    }
  }
  return result;
}
public Collection<EntityMetaData> loadEMD(RepositoryCollection emdFormattedRepos) { // extract entity metadata Map<String, DefaultEntityMetaData> entities = new LinkedHashMap<String, DefaultEntityMetaData>(); for (Entity e : emdFormattedRepos.getRepositoryByEntityName("entities")) { DefaultEntityMetaData emd = new DefaultEntityMetaData(e.getString("name")); if (e.getBoolean("abstract")) emd.setAbstract(true); } // extract extends relations for (Entity e : emdFormattedRepos.getRepositoryByEntityName("entities")) { if (e.get("extends") != null) { DefaultEntityMetaData emd = entities.get(e.get("name")); emd.setExtends(entities.get(e.get("extends"))); } } Collection<EntityMetaData> result = new ArrayList<EntityMetaData>(); result.addAll(entities.values()); return result; }
@RequestMapping( value = "/aggregate", method = RequestMethod.POST, produces = "application/json", consumes = "application/json") @ResponseBody public AggregateResponse aggregate(@Valid @RequestBody AggregateRequest request) { // TODO create utility class to extract info from entity/attribute uris String[] attributeUriTokens = request.getAttributeUri().split("/"); String entityName = attributeUriTokens[3]; String attributeName = attributeUriTokens[5]; QueryImpl q = request.getQ() != null ? new QueryImpl(request.getQ()) : new QueryImpl(); EntityMetaData entityMeta = dataService.getEntityMetaData(entityName); AttributeMetaData attributeMeta = entityMeta.getAttribute(attributeName); FieldTypeEnum dataType = attributeMeta.getDataType().getEnumType(); if (dataType != FieldTypeEnum.BOOL && dataType != FieldTypeEnum.CATEGORICAL) { throw new RuntimeException("Unsupported data type " + dataType); } EntityMetaData refEntityMeta = null; String refAttributeName = null; if (dataType == FieldTypeEnum.CATEGORICAL) { refEntityMeta = attributeMeta.getRefEntity(); refAttributeName = refEntityMeta.getLabelAttribute().getName(); } Map<String, Integer> aggregateMap = new HashMap<String, Integer>(); for (Entity entity : dataService.findAll(entityName, q)) { String val; switch (dataType) { case BOOL: val = entity.getString(attributeName); break; case CATEGORICAL: Entity refEntity = (Entity) entity.get(attributeName); val = refEntity.getString(refAttributeName); break; default: throw new RuntimeException("Unsupported data type " + dataType); } Integer count = aggregateMap.get(val); if (count == null) aggregateMap.put(val, 1); else aggregateMap.put(val, count + 1); } return new AggregateResponse(aggregateMap); }
/**
 * Verifies that a CSV entity source created from a csv:// URL exposes exactly one
 * entity (named after the file) with the expected columns and first-row values.
 */
@Test
public void createByUrl() throws IOException {
  // Copy the classpath resource to a temp file so it is addressable by URL.
  InputStream in = getClass().getResourceAsStream("/testdata.csv");
  File csvFile = new File(FileUtils.getTempDirectory(), "testdata.csv");
  FileCopyUtils.copy(in, new FileOutputStream(csvFile));
  String url = "csv://" + csvFile.getAbsolutePath();

  EntitySource entitySource = new CsvEntitySource(url, null);
  try {
    assertEquals(entitySource.getUrl(), url);

    // Exactly one entity name, derived from the file name.
    Iterator<String> nameIt = entitySource.getEntityNames().iterator();
    assertNotNull(nameIt);
    assertTrue(nameIt.hasNext());
    assertEquals(nameIt.next(), "testdata");
    assertFalse(nameIt.hasNext());

    Repository<? extends Entity> repo = entitySource.getRepositoryByEntityName("testdata");
    assertNotNull(repo);

    // Exactly two attributes: col1 and col2.
    Iterator<AttributeMetaData> attrIt = repo.getAttributes().iterator();
    assertNotNull(attrIt);
    assertTrue(attrIt.hasNext());
    AttributeMetaData firstAttr = attrIt.next();
    assertNotNull(firstAttr);
    assertEquals(firstAttr.getName(), "col1");
    AttributeMetaData secondAttr = attrIt.next();
    assertNotNull(secondAttr);
    assertEquals(secondAttr.getName(), "col2");
    assertFalse(attrIt.hasNext());

    // First data row holds val1/val2.
    Iterator<? extends Entity> rowIt = repo.iterator();
    assertNotNull(rowIt);
    assertTrue(rowIt.hasNext());
    Entity firstRow = rowIt.next();
    assertNotNull(firstRow);
    assertEquals(firstRow.get("col1"), "val1");
    assertEquals(firstRow.get("col2"), "val2");
  } finally {
    entitySource.close();
  }
}
/**
 * Validates that every required (non-nillable) attribute of the entity has a value,
 * adding a constraint violation for each missing one.
 *
 * <p>An mref attribute counts as missing when its entity iterable is empty. Two
 * escape hatches suppress the violation: Questionnaire entities whose status is not
 * "SUBMITTED", and attributes whose visible-expression evaluates to false.
 *
 * @param entity the entity to validate
 * @param validationResource collects violations and tracks the current row
 */
private void validateEntityValueRequired(Entity entity, ValidationResource validationResource) {
  validationResource
      .getRequiredValueAttrs()
      .forEach(
          nonNillableAttr -> {
            Object value = entity.get(nonNillableAttr.getName());
            // Missing = null, or an mref with zero referenced entities.
            if (value == null
                || (nonNillableAttr.getDataType() instanceof MrefField
                    && !entity.getEntities(nonNillableAttr.getName()).iterator().hasNext())) {
              boolean isValid = false;

              // FIXME remove hack (see https://github.com/molgenis/molgenis/issues/4308)
              // Do not validate if Questionnaire status is not SUBMITTED
              if (EntityUtils.doesExtend(getEntityMetaData(), "Questionnaire")
                  && !"SUBMITTED".equals(entity.getString("status"))) {
                isValid = true;
              }
              // Do not validate if visibleExpression resolves to false
              else if (nonNillableAttr.getVisibleExpression() != null
                  && !expressionValidator.resolveBooleanExpression(
                      nonNillableAttr.getVisibleExpression(), entity, getEntityMetaData())) {
                isValid = true;
              }

              if (!isValid) {
                String message =
                    format(
                        "The attribute '%s' of entity '%s' can not be null.",
                        nonNillableAttr.getName(), getName());

                ConstraintViolation constraintViolation =
                    new ConstraintViolation(
                        message, nonNillableAttr, Long.valueOf(validationResource.getRow()));
                validationResource.addViolation(constraintViolation);
              }
            }
          });
}
/**
 * Loads OMX-formatted metadata (observablefeature / protocol / dataset sheets) into
 * entity metadata.
 *
 * <p>Pass 1 builds attributes from "observablefeature" rows. Pass 2 turns non-empty
 * "protocol" rows into abstract entities carrying those attributes. Pass 3 adds
 * sub-protocols to their parent entity as COMPOUND attributes. Pass 4 creates one
 * entity per "dataset" row extending its used protocol.
 *
 * @param omx the repository collection with the OMX sheets
 * @return all created entity metadata (protocols and datasets)
 */
public Collection<EntityMetaData> loadOMX(RepositoryCollection omx) {
  // extract attribute metadata
  Map<String, AttributeMetaData> attributes = new LinkedHashMap<String, AttributeMetaData>();
  for (Entity e : omx.getRepositoryByEntityName("observablefeature")) {
    logger.debug("found observablefeature: " + e);
    DefaultAttributeMetaData att = new DefaultAttributeMetaData(e.getString("name"));
    if (e.get("dataType") != null)
      att.setDataType(MolgenisFieldTypes.getType(e.getString("dataType")));
    // Attributes are keyed by identifier, not by name.
    attributes.put(e.getString("identifier"), att);
    if (e.get("description") != null) att.setDescription(e.getString("description"));
    // TODO unit! if(e.get("unit") != null)
    if ("xref".equals(e.get("dataType")) || "mref".equals(e.get("dataType"))) {
      // TODO: cannot solve!!! xref/mref targets default to Characteristic.
      att.setRefEntity(omxEntities.get("Characteristic"));
    }
    if ("categorical".equals(e.get("dataType"))) {
      att.setRefEntity(omxEntities.get("Category"));
    }
  }
  // TODO: fix categorical!

  // extract protocols as entities(abstract=true)
  Map<String, EntityMetaData> entities = new LinkedHashMap<String, EntityMetaData>();
  for (Entity e : omx.getRepositoryByEntityName("protocol")) {
    // skip all null entities
    if (hasValues(e)) {
      logger.debug("found protocol: " + e);
      DefaultEntityMetaData ent = new DefaultEntityMetaData(e.getString("identifier")); // alas
      ent.setLabel(e.getString("name"));
      ent.setAbstract(true);

      // add attributes referenced by identifier; unknown identifiers are fatal
      if (e.get("features_identifier") != null)
        for (String attIdentifier : e.getList("features_identifier")) {
          if (attributes.get(attIdentifier) == null)
            throw new RuntimeException("attribute '" + attIdentifier + "' unknown");
          ent.addAttributeMetaData(attributes.get(attIdentifier));
        }
      entities.put(e.getString("identifier"), ent);
    }
  }

  for (Entity e : omx.getRepositoryByEntityName("protocol")) {
    // add subprotocols as compound attributes on the parent protocol entity
    if (e.get("subprotocols_identifier") != null) {
      for (String subProtocol : e.getList("subprotocols_identifier")) {
        DefaultAttributeMetaData att = new DefaultAttributeMetaData(subProtocol);
        att.setDataType(MolgenisFieldTypes.COMPOUND);
        att.setRefEntity(entities.get(subProtocol));
        ((DefaultEntityMetaData) entities.get(e.get("identifier"))).addAttributeMetaData(att);
      }
    }
  }

  // create dataset as entities
  for (Entity e : omx.getRepositoryByEntityName("dataset")) {
    logger.debug("found dataset: " + e);
    DefaultEntityMetaData ent = new DefaultEntityMetaData(e.getString("identifier"));
    ent.setLabel(e.getString("name"));
    // dataset 'extends' protocol
    ent.setExtends(entities.get(e.getString("protocolused_identifier")));
    entities.put(e.getString("identifier"), ent);
  }
  return entities.values();
}
/** Copies every attribute value from the supplied entity onto this entity. */
@Override
public void set(Entity values) {
  for (String attrName : values.getAttributeNames()) {
    set(attrName, values.get(attrName));
  }
}
/**
 * Verifies manual mappings uploaded as an Excel file against indexed mapping results
 * and streams a multi-sheet Excel report ("result", "rank statistics",
 * "biobank average ranks", "spss ranks") back as an attachment.
 *
 * <p>The first column of the uploaded sheet must match the selected dataset's name;
 * the remaining columns are biobank names. For each variable the indexed mapping
 * candidates are ranked and compared to the manual mapping.
 *
 * <p>NOTE(review): "maunalMappings" (typo for manualMappings) is a local name and
 * "minium" appears in a runtime sheet header — the header must not be silently
 * changed without updating consumers of the generated workbook.
 *
 * @param selectedDataSetId id of the dataset the first column must correspond to
 * @param file the uploaded Excel file (sheet "Sheet1")
 * @param response receives the generated Excel workbook
 * @param model unused here
 * @throws IOException on upload or workbook I/O failure
 */
@RequestMapping(
    value = "/verify",
    method = RequestMethod.POST,
    headers = "Content-Type=multipart/form-data")
public void verify(
    @RequestParam(value = "selectedDataSet", required = false) String selectedDataSetId,
    @RequestParam Part file,
    HttpServletResponse response,
    Model model)
    throws IOException {
  EntitySource reader = null;
  ExcelWriter<Entity> excelWriterRanks = null;
  try {
    if (selectedDataSetId != null) {
      // Persist the upload and prepare the Excel response stream.
      String origFileName = FileUploadUtils.getOriginalFileName(file);
      File uploadFile = fileStore.store(file.getInputStream(), origFileName);
      response.setContentType("application/vnd.ms-excel");
      response.addHeader(
          "Content-Disposition",
          "attachment; filename=" + getCsvFileName(file.getName() + "-ranks"));
      excelWriterRanks = new ExcelWriter<Entity>(response.getOutputStream());
      excelWriterRanks.addCellProcessor(new LowerCaseProcessor(true, false));
      Writable<Entity> sheetWriterRank = null;
      Writable<Entity> sheetWriterRankStatistics = null;
      Writable<Entity> sheetWriteBiobankRanks = null;
      Writable<Entity> sheetWriteSpssInput = null;
      reader = new ExcelEntitySourceFactory().create(uploadFile);
      Repository<? extends Entity> inputSheet = reader.getRepositoryByEntityName("Sheet1");
      // Column 0 is the variable column; the rest are biobank columns.
      List<String> biobankNames = new ArrayList<String>();
      for (AttributeMetaData attr : inputSheet.getAttributes()) {
        biobankNames.add(attr.getName());
      }
      String firstColumn = biobankNames.get(0);
      biobankNames.remove(0);
      // First column has to correspond to the selected dataset
      DataSet ds = dataService.findOne(DataSet.ENTITY_NAME, Integer.parseInt(selectedDataSetId));
      if (ds.getName().equalsIgnoreCase(firstColumn)) {
        // variable -> (biobank -> comma-split manual mapping values)
        Map<String, Map<String, List<String>>> maunalMappings =
            new HashMap<String, Map<String, List<String>>>();
        for (Entity row : inputSheet) {
          String variableName = row.getString(firstColumn);
          if (!maunalMappings.containsKey(variableName))
            maunalMappings.put(variableName, new HashMap<String, List<String>>());
          for (String biobank : biobankNames) {
            if (row.get(biobank) != null) {
              String mappingString = row.get(biobank).toString();
              if (!maunalMappings.containsKey(variableName)) {
                maunalMappings.put(variableName, new HashMap<String, List<String>>());
              }
              if (!maunalMappings.get(variableName).containsKey(biobank.toLowerCase())) {
                maunalMappings
                    .get(variableName)
                    .put(biobank.toLowerCase(), new ArrayList<String>());
              }
              maunalMappings
                  .get(variableName)
                  .get(biobank.toLowerCase())
                  .addAll(Arrays.asList(mappingString.split(",")));
            }
          }
        }
        List<String> lowerCaseBiobankNames = new ArrayList<String>();
        for (String element : biobankNames) {
          lowerCaseBiobankNames.add(element.toLowerCase());
        }
        List<DataSet> dataSets =
            dataService.findAllAsList(
                DataSet.ENTITY_NAME, new QueryImpl().in(DataSet.NAME, lowerCaseBiobankNames));
        lowerCaseBiobankNames.add(0, firstColumn.toLowerCase());
        sheetWriterRank = excelWriterRanks.createWritable("result", lowerCaseBiobankNames);
        // description -> (biobank -> ranks of matched manual mappings)
        Map<String, Map<String, List<Integer>>> rankCollection =
            new HashMap<String, Map<String, List<Integer>>>();
        // Flat list of every rank (or "Not mapped") for the SPSS sheet.
        List<Object> allRanks = new ArrayList<Object>();
        for (Entry<String, Map<String, List<String>>> entry : maunalMappings.entrySet()) {
          String variableName = entry.getKey();
          List<String> ranks = new ArrayList<String>();
          ranks.add(variableName);
          Map<String, List<String>> mappingDetail = entry.getValue();
          List<ObservableFeature> features =
              dataService.findAllAsList(
                  ObservableFeature.ENTITY_NAME,
                  new QueryImpl().eq(ObservableFeature.NAME, variableName));
          String description = features.get(0).getDescription();
          if (!rankCollection.containsKey(description))
            rankCollection.put(description, new HashMap<String, List<Integer>>());
          if (!features.isEmpty()) {
            Entity row = new MapEntity();
            row.set(firstColumn.toLowerCase(), description);
            for (DataSet dataSet : dataSets) {
              List<Integer> ranksBiobank = new ArrayList<Integer>();
              if (mappingDetail.containsKey(dataSet.getName().toLowerCase())) {
                Map<String, Hit> mappedFeatureIds =
                    findFeaturesFromIndex(
                        "name",
                        mappingDetail.get(dataSet.getName().toLowerCase()),
                        dataSet.getId());
                String mappingDataSetIdentifier =
                    SecurityUtils.getCurrentUsername()
                        + "-"
                        + selectedDataSetId
                        + "-"
                        + dataSet.getId();
                // Top-50 mapping candidates, best score first.
                Query q =
                    new QueryImpl()
                        .eq("store_mapping_feature", features.get(0).getId())
                        .pageSize(50)
                        .sort(new Sort(Direction.DESC, "store_mapping_score"));
                SearchRequest searchRequest = new SearchRequest(mappingDataSetIdentifier, q, null);
                SearchResult result = searchService.search(searchRequest);
                if (mappedFeatureIds.size() == 0) {
                  row.set(dataSet.getName().toLowerCase(), "N/A2");
                  continue;
                }
                List<String> ids = new ArrayList<String>();
                for (Hit hit : result.getSearchHits()) {
                  Map<String, Object> columnValueMap = hit.getColumnValueMap();
                  ids.add(columnValueMap.get("store_mapping_mapped_feature").toString());
                }
                Map<String, Hit> featureInfos = findFeaturesFromIndex("id", ids, dataSet.getId());
                String previousDescription = null;
                int rank = 0;
                for (Hit hit : result.getSearchHits()) {
                  Map<String, Object> columnValueMap = hit.getColumnValueMap();
                  String mappedFeatureId =
                      columnValueMap.get("store_mapping_mapped_feature").toString();
                  String mappedFeatureDescription =
                      featureInfos
                          .get(mappedFeatureId)
                          .getColumnValueMap()
                          .get("description")
                          .toString()
                          .replaceAll("[^0-9a-zA-Z ]", " ");
                  rank++;
                  // Candidates with the same (normalized) description share a rank.
                  if (previousDescription != null
                      && previousDescription.equalsIgnoreCase(mappedFeatureDescription)) rank--;
                  if (mappedFeatureIds.containsKey(mappedFeatureId)) {
                    ranksBiobank.add(rank);
                    allRanks.add(rank);
                    mappedFeatureIds.remove(mappedFeatureId);
                  }
                  previousDescription = mappedFeatureDescription;
                }
                if (mappedFeatureIds.size() == 0) {
                  // All manual mappings found among the candidates.
                  String output = StringUtils.join(ranksBiobank, ',');
                  if (ranksBiobank.size() > 1) {
                    output += " (" + averageRank(ranksBiobank) + ")";
                  }
                  row.set(dataSet.getName().toLowerCase(), output);
                } else {
                  for (int i = 0; i < mappedFeatureIds.size(); i++) allRanks.add("Not mapped");
                  row.set(dataSet.getName().toLowerCase(), "Not mapped");
                  ranksBiobank.clear();
                }
              } else row.set(dataSet.getName().toLowerCase(), "N/A1");
              rankCollection.get(description).put(dataSet.getName().toLowerCase(), ranksBiobank);
            }
            sheetWriterRank.add(row);
          }
        }
        Map<String, List<Integer>> rankCollectionPerBiobank =
            new HashMap<String, List<Integer>>();
        {
          // Sheet 2: per-variable rank statistics across all biobanks.
          sheetWriterRankStatistics =
              excelWriterRanks.createWritable(
                  "rank statistics",
                  Arrays.asList(
                      firstColumn.toLowerCase(),
                      "average rank",
                      "round-up rank",
                      "median rank",
                      "minium",
                      "maximum"));
          for (Entry<String, Map<String, List<Integer>>> entry : rankCollection.entrySet()) {
            String variableName = entry.getKey();
            Entity row = new MapEntity();
            row.set(firstColumn.toLowerCase(), variableName);
            List<Integer> rankAllBiobanks = new ArrayList<Integer>();
            for (Entry<String, List<Integer>> rankBiobanks : entry.getValue().entrySet()) {
              if (!rankCollectionPerBiobank.containsKey(rankBiobanks.getKey()))
                rankCollectionPerBiobank.put(rankBiobanks.getKey(), new ArrayList<Integer>());
              rankCollectionPerBiobank.get(rankBiobanks.getKey()).addAll(rankBiobanks.getValue());
              rankAllBiobanks.addAll(rankBiobanks.getValue());
            }
            row.set("average rank", averageRank(rankAllBiobanks));
            row.set("round-up rank", Math.ceil(averageRank(rankAllBiobanks)));
            Collections.sort(rankAllBiobanks);
            if (!rankAllBiobanks.isEmpty()) {
              row.set("minium", rankAllBiobanks.get(0));
              row.set("maximum", rankAllBiobanks.get(rankAllBiobanks.size() - 1));
              double medianRank = 0;
              if (rankAllBiobanks.size() % 2 == 0) {
                medianRank =
                    (double)
                            (rankAllBiobanks.get(rankAllBiobanks.size() / 2 - 1)
                                + rankAllBiobanks.get(rankAllBiobanks.size() / 2))
                        / 2;
              } else {
                medianRank = rankAllBiobanks.get(rankAllBiobanks.size() / 2);
              }
              row.set("median rank", medianRank);
            }
            sheetWriterRankStatistics.add(row);
          }
        }
        {
          // Sheet 3: one row of average ranks per biobank.
          lowerCaseBiobankNames.remove(0);
          sheetWriteBiobankRanks =
              excelWriterRanks.createWritable("biobank average ranks", lowerCaseBiobankNames);
          Entity entity = new MapEntity();
          for (Entry<String, List<Integer>> entry : rankCollectionPerBiobank.entrySet()) {
            entity.set(entry.getKey(), averageRank(entry.getValue()));
          }
          sheetWriteBiobankRanks.add(entity);
        }
        {
          // Sheet 4: flat rank column for SPSS import.
          sheetWriteSpssInput = excelWriterRanks.createWritable("spss ranks", Arrays.asList("rank"));
          for (Object rank : allRanks) {
            Entity entity = new MapEntity("rank", rank);
            sheetWriteSpssInput.add(entity);
          }
        }
      }
    }
  } finally {
    if (reader != null) reader.close();
    if (excelWriterRanks != null) IOUtils.closeQuietly(excelWriterRanks);
  }
}
/** Asserts that the update left both the identifier and col1 set to "ONE". */
@Override
public void verifyTestEntityAfterUpdate(Entity entity) throws Exception {
  final String expected = "ONE";
  assertEquals(entity.get("identifier"), expected);
  assertEquals(entity.get("col1"), expected);
}
@Override public Map<String, DefaultEntityMetaData> getEntityMetaData(RepositoryCollection source) { // TODO: this task is actually a 'merge' instead of 'import' // so we need to consider both new metadata as existing ... Map<String, DefaultEntityMetaData> entities = new LinkedHashMap<String, DefaultEntityMetaData>(); // load attributes first (because entities are optional). for (Entity a : source.getRepositoryByEntityName("attributes")) { int i = 1; String entityName = a.getString("entity"); // required if (entityName == null) throw new IllegalArgumentException("attributes.entity is missing"); if (a.get("name") == null) throw new IllegalArgumentException("attributes.name is missing"); // create entity if not yet defined if (entities.get(entityName) == null) entities.put(entityName, new DefaultEntityMetaData(entityName)); DefaultEntityMetaData md = entities.get(entityName); DefaultAttributeMetaData am = new DefaultAttributeMetaData(a.getString("name")); if (a.get("dataType") != null) { FieldType t = MolgenisFieldTypes.getType(a.getString("dataType")); if (t == null) throw new IllegalArgumentException( "attributes.type error on line " + i + ": " + a.getString("dataType") + " unknown"); am.setDataType(t); } if (a.get("nillable") != null) am.setNillable(a.getBoolean("nillable")); if (a.get("auto") != null) am.setAuto(a.getBoolean("auto")); if (a.get("idAttribute") != null) am.setIdAttribute(a.getBoolean("idAttribute")); md.addAttributeMetaData(am); } // load all entities (optional) if (source.getRepositoryByEntityName("entities") != null) { int i = 1; for (Entity e : source.getRepositoryByEntityName("entities")) { i++; String entityName = e.getString("name"); // required if (entityName == null) throw new IllegalArgumentException("entity.name is missing on line " + i); if (entities.get(entityName) == null) entities.put(entityName, new DefaultEntityMetaData(entityName)); DefaultEntityMetaData md = entities.get(entityName); if (e.get("description") != null) 
md.setDescription(e.getString("description")); } } // re-iterate to map the mrefs/xref refEntity (or give error if not found) // TODO: consider also those in existing db int i = 1; for (Entity a : source.getRepositoryByEntityName("attributes")) { i++; if (a.get("refEntity") != null) { DefaultEntityMetaData em = entities.get(a.getString("entity")); DefaultAttributeMetaData am = (DefaultAttributeMetaData) em.getAttribute(a.getString("name")); if (entities.get(a.getString("refEntity")) == null) { throw new IllegalArgumentException( "attributes.refEntity error on line " + i + ": " + a.getString("refEntity") + " unknown"); } am.setRefEntity(entities.get(a.getString("refEntity"))); } } return entities; }