Example #1
  @Override
  public EntitiesValidationReport validateImport(RepositoryCollection source) {
    EntitiesValidationReportImpl report = new EntitiesValidationReportImpl();

    // compare the data sheets against metadata in store or imported file
    Map<String, DefaultEntityMetaData> metaDataMap = getEntityMetaData(source);

    for (String sheet : source.getEntityNames()) {
      if (!"entities".equals(sheet) && !"attributes".equals(sheet)) {
        // is the sheet known?
        report.getSheetsImportable().put(sheet, metaDataMap.containsKey(sheet));

        // check the fields
        Repository s = source.getRepositoryByEntityName(sheet);
        EntityMetaData target = metaDataMap.get(sheet);

        if (target != null) {
          List<String> fieldsAvailable = new ArrayList<String>();
          List<String> fieldsImportable = new ArrayList<String>();
          List<String> fieldsRequired = new ArrayList<String>();
          List<String> fieldsUnknown = new ArrayList<String>();

          for (AttributeMetaData att : s.getEntityMetaData().getAttributes()) {
            if (target.getAttribute(att.getName()) == null) fieldsUnknown.add(att.getName());
            else fieldsImportable.add(att.getName());
          }
          for (AttributeMetaData att : target.getAttributes()) {
            if (!fieldsImportable.contains(att.getName())) {
              if (!att.isNillable()) fieldsRequired.add(att.getName());
              else fieldsAvailable.add(att.getName());
            }
          }

          report.getFieldsAvailable().put(sheet, fieldsAvailable);
          report.getFieldsRequired().put(sheet, fieldsRequired);
          report.getFieldsUnknown().put(sheet, fieldsUnknown);
          report.getFieldsImportable().put(sheet, fieldsImportable);
        }
      }
    }
    return report;
  }
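
A minimal sketch, not part of the original example, of how the report returned by validateImport might be consumed. It assumes the helper lives in the same importer class and that a logger field is available; it relies only on report getters already used above (getSheetsImportable, getFieldsRequired, getFieldsUnknown).

  // Hypothetical helper (assumed names): log what validateImport found before attempting an import.
  public void logValidation(RepositoryCollection source) {
    EntitiesValidationReport report = validateImport(source);
    for (Map.Entry<String, Boolean> sheet : report.getSheetsImportable().entrySet()) {
      if (!sheet.getValue()) {
        // sheet has no matching entity metadata and would not be imported
        logger.warn("sheet '" + sheet.getKey() + "' does not match any known entity");
        continue;
      }
      List<String> required = report.getFieldsRequired().get(sheet.getKey());
      List<String> unknown = report.getFieldsUnknown().get(sheet.getKey());
      if (required != null && !required.isEmpty())
        logger.warn("sheet '" + sheet.getKey() + "' is missing required fields: " + required);
      if (unknown != null && !unknown.isEmpty())
        logger.warn("sheet '" + sheet.getKey() + "' contains unknown fields: " + unknown);
    }
  }
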
  public Collection<EntityMetaData> loadEMD(RepositoryCollection emdFormattedRepos) {
    // extract entity metadata
    Map<String, DefaultEntityMetaData> entities =
        new LinkedHashMap<String, DefaultEntityMetaData>();
    for (Entity e : emdFormattedRepos.getRepositoryByEntityName("entities")) {
      DefaultEntityMetaData emd = new DefaultEntityMetaData(e.getString("name"));
      if (Boolean.TRUE.equals(e.getBoolean("abstract"))) emd.setAbstract(true);
      // register the entity so the 'extends' pass below can resolve it
      entities.put(e.getString("name"), emd);
    }

    // extract extends relations
    for (Entity e : emdFormattedRepos.getRepositoryByEntityName("entities")) {
      if (e.get("extends") != null) {
        DefaultEntityMetaData emd = entities.get(e.get("name"));
        emd.setExtends(entities.get(e.get("extends")));
      }
    }

    Collection<EntityMetaData> result = new ArrayList<EntityMetaData>();
    result.addAll(entities.values());
    return result;
  }
  @Test
  public void integrationTestMetaData() {
    DataServiceImpl dataServiceImpl = Mockito.mock(DataServiceImpl.class);
    // return false so that the canIntegrateEntityMetadataCheck test is skipped
    when(dataServiceImpl.hasRepository("attributes")).thenReturn(Boolean.FALSE);
    MetaDataServiceImpl metaDataService = new MetaDataServiceImpl(dataServiceImpl);
    RepositoryCollection repositoryCollection = Mockito.mock(RepositoryCollection.class);

    when(repositoryCollection.getEntityNames()).thenReturn(Lists.newArrayList("attributes"));

    DefaultEntityMetaData newEntityMetaData = new DefaultEntityMetaData("attributes");
    newEntityMetaData.addAttribute("ID");
    Repository repo1 = Mockito.mock(Repository.class);
    when(repositoryCollection.getRepository("attributes")).thenReturn(repo1);
    when(repo1.getEntityMetaData()).thenReturn(newEntityMetaData);

    LinkedHashMap<String, Boolean> entitiesImportable = new LinkedHashMap<String, Boolean>();
    entitiesImportable.put("attributes", true);

    assertEquals(entitiesImportable, metaDataService.integrationTestMetaData(repositoryCollection));
  }
Example #4
  @Override
  @Transactional(rollbackFor = IOException.class)
  public EntityImportReport doImport(RepositoryCollection source, DatabaseAction databaseAction)
      throws IOException {
    if (store == null) throw new RuntimeException("store was not set");

    EntityImportReport report = new EntityImportReport();

    // TODO: need to change order

    Map<String, DefaultEntityMetaData> metadata = getEntityMetaData(source);

    for (Entry<String, DefaultEntityMetaData> entry : metadata.entrySet()) {
      String name = entry.getKey();
      if (!"entities".equals(name) && !"attributes".equals(name)) {
        Repository from = source.getRepositoryByEntityName(name);

        // TODO check if compatible with metadata

        // create repo if needed
        MysqlRepository to = (MysqlRepository) store.getRepositoryByEntityName(name);

        if (to == null) {
          logger.debug("trying to create: " + name);

          EntityMetaData em = metadata.get(name);
          if (em == null) throw new IllegalArgumentException("Unknown entity: " + name);
          store.add(em);

          to = (MysqlRepository) store.getRepositoryByEntityName(name);
        }

        // import

        report.getNrImportedEntitiesMap().put(name, to.add(from));
      }
    }

    return report;
  }
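
A minimal sketch, not part of the original example, showing how this doImport might be driven and its report inspected. DatabaseAction.ADD is assumed here as the action value; the counts map getter is the one populated above.

  // Hypothetical caller (assumed names): run the import and log the per-entity counts.
  public void importAndLog(RepositoryCollection source) throws IOException {
    EntityImportReport report = doImport(source, DatabaseAction.ADD); // ADD is an assumed action value
    for (String name : report.getNrImportedEntitiesMap().keySet()) {
      logger.info("imported " + report.getNrImportedEntitiesMap().get(name) + " entities into '" + name + "'");
    }
  }
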
  public Collection<EntityMetaData> loadOMX(RepositoryCollection omx) {
    // extract attribute metadata
    Map<String, AttributeMetaData> attributes = new LinkedHashMap<String, AttributeMetaData>();
    for (Entity e : omx.getRepositoryByEntityName("observablefeature")) {
      logger.debug("found observablefeature: " + e);

      DefaultAttributeMetaData att = new DefaultAttributeMetaData(e.getString("name"));
      if (e.get("dataType") != null)
        att.setDataType(MolgenisFieldTypes.getType(e.getString("dataType")));
      attributes.put(e.getString("identifier"), att);
      if (e.get("description") != null) att.setDescription(e.getString("description"));
      // TODO unit! if(e.get("unit") != null)

      if ("xref".equals(e.get("dataType")) || "mref".equals(e.get("dataType"))) {
        // TODO: the actual target entity cannot be determined here; fall back to Characteristic
        att.setRefEntity(omxEntities.get("Characteristic"));
      }
      if ("categorical".equals(e.get("dataType"))) {
        att.setRefEntity(omxEntities.get("Category"));
      }
    }
    // TODO: fix categorical!

    // extract protocols as entities(abstract=true)
    Map<String, EntityMetaData> entities = new LinkedHashMap<String, EntityMetaData>();
    for (Entity e : omx.getRepositoryByEntityName("protocol")) {
      // skip all null entities
      if (hasValues(e)) {
        logger.debug("found protocol: " + e);

        DefaultEntityMetaData ent = new DefaultEntityMetaData(e.getString("identifier")); // identifier, not name, is used as the entity name
        ent.setLabel(e.getString("name"));
        ent.setAbstract(true);

        // add attributes
        if (e.get("features_identifier") != null)
          for (String attIdentifier : e.getList("features_identifier")) {
            if (attributes.get(attIdentifier) == null)
              throw new RuntimeException("attribute '" + attIdentifier + "' unknown");
            ent.addAttributeMetaData(attributes.get(attIdentifier));
          }

        entities.put(e.getString("identifier"), ent);
      }
    }

    for (Entity e : omx.getRepositoryByEntityName("protocol")) {
      // add subprotocols as compound
      if (e.get("subprotocols_identifier") != null) {
        for (String subProtocol : e.getList("subprotocols_identifier")) {
          DefaultAttributeMetaData att = new DefaultAttributeMetaData(subProtocol);
          att.setDataType(MolgenisFieldTypes.COMPOUND);
          att.setRefEntity(entities.get(subProtocol));
          ((DefaultEntityMetaData) entities.get(e.get("identifier"))).addAttributeMetaData(att);
        }
      }
    }

    // create dataset as entities
    for (Entity e : omx.getRepositoryByEntityName("dataset")) {
      logger.debug("found dataset: " + e);

      DefaultEntityMetaData ent = new DefaultEntityMetaData(e.getString("identifier"));
      ent.setLabel(e.getString("name"));
      // dataset 'extends' protocol
      ent.setExtends(entities.get(e.getString("protocolused_identifier")));
      entities.put(e.getString("identifier"), ent);
    }

    return entities.values();
  }
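
A minimal debugging sketch, not part of the original example, that prints the entities and attributes extracted by loadOMX. It uses only methods that already appear in these examples (getName, getAttributes, isNillable).

  // Hypothetical helper (assumed name): dump the metadata produced by loadOMX.
  public void printOmxMetaData(RepositoryCollection omx) {
    for (EntityMetaData emd : loadOMX(omx)) {
      logger.debug("entity: " + emd.getName());
      for (AttributeMetaData att : emd.getAttributes()) {
        logger.debug("  attribute: " + att.getName() + " (nillable=" + att.isNillable() + ")");
      }
    }
  }
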
  @Override
  @Transactional(rollbackFor = IOException.class)
  public EntityImportReport doImport(
      RepositoryCollection repositories, DatabaseAction databaseAction) throws IOException {
    // All new repository identifiers
    List<String> newRepoIdentifiers = new ArrayList<String>();

    // First import the entities; the data sheets are ignored by the entities importer
    EntityImportReport importReport = entitiesImporter.importEntities(repositories, databaseAction);

    // RULE: Feature can only belong to one Protocol in a DataSet. Check it (see issue #1136)
    checkFeatureCanOnlyBelongToOneProtocolForOneDataSet();

    // Import data sheets
    for (String name : repositories.getEntityNames()) {
      Repository repository = repositories.getRepositoryByEntityName(name);

      if (repository.getName().startsWith(DATASET_SHEET_PREFIX)) {
        // Import DataSet sheet, create new OmxRepository
        String identifier = repository.getName().substring(DATASET_SHEET_PREFIX.length());

        if (!dataService.hasRepository(identifier)) {

          dataService.addRepository(
              new AggregateableCrudRepositorySecurityDecorator(
                  new OmxRepository(dataService, searchService, identifier, entityValidator)));
          newRepoIdentifiers.add(identifier);

          DataSet dataSet =
              dataService.findOne(
                  DataSet.ENTITY_NAME,
                  new QueryImpl().eq(DataSet.IDENTIFIER, identifier),
                  DataSet.class);

          List<Protocol> protocols =
              ProtocolUtils.getProtocolDescendants(dataSet.getProtocolUsed());
          List<ObservableFeature> categoricalFeatures = new ArrayList<ObservableFeature>();
          for (Protocol protocol : protocols) {
            List<ObservableFeature> observableFeatures = protocol.getFeatures();
            if (observableFeatures != null) {
              for (ObservableFeature observableFeature : observableFeatures) {
                String dataType = observableFeature.getDataType();
                FieldType type = MolgenisFieldTypes.getType(dataType);
                if (type.getEnumType() == FieldTypeEnum.CATEGORICAL) {
                  categoricalFeatures.add(observableFeature);
                }
              }
            }
          }
          for (ObservableFeature categoricalFeature : categoricalFeatures) {
            if (!dataService.hasRepository(
                OmxLookupTableEntityMetaData.createOmxLookupTableEntityMetaDataName(
                    categoricalFeature.getIdentifier()))) {
              dataService.addRepository(
                  new OmxLookupTableRepository(
                      dataService, categoricalFeature.getIdentifier(), queryResolver));
              newRepoIdentifiers.add(
                  OmxLookupTableEntityMetaData.createOmxLookupTableEntityMetaDataName(
                      categoricalFeature.getIdentifier()));
            }
          }
        }

        // Check if all column names in the excel sheet exist as attributes of the entity
        Set<ConstraintViolation> violations = Sets.newLinkedHashSet();
        EntityMetaData meta = dataService.getEntityMetaData(identifier);
        for (AttributeMetaData attr : repository.getEntityMetaData().getAttributes()) {
          if (meta.getAttribute(attr.getName()) == null) {
            String message =
                String.format(
                    "Unknown attribute name '%s' for entity '%s'. Sheet: '%s'",
                    attr.getName(), meta.getName(), repository.getName());
            violations.add(new ConstraintViolation(message, attr.getName(), null, null, meta, 0));
          }
        }

        if (!violations.isEmpty()) {
          throw new MolgenisValidationException(violations);
        }

        // Import data into new OmxRepository
        try {
          dataService.add(identifier, repository);
        } catch (MolgenisValidationException e) {
          // Add sheet info
          for (ConstraintViolation violation : e.getViolations()) {
            if (violation.getRownr() > 0) {

              // Rownr +1 for header
              violation.setImportInfo(
                  String.format(
                      "Sheet: '%s', row: %d", repository.getName(), violation.getRownr() + 1));
            } else {
              violation.setImportInfo(String.format("Sheet: '%s'", repository.getName()));
            }
          }

          for (String newRepoIdentifier : newRepoIdentifiers) {
            dataService.removeRepository(newRepoIdentifier);
          }

          throw e;
        }

        int count = (int) RepositoryUtils.count(repository);
        importReport.addEntityCount(identifier, count);
        importReport.addNrImported(count);
      }
    }

    return importReport;
  }
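
A minimal wrapper sketch, not part of the original example, showing how a caller might surface the row information that doImport attaches to validation errors. DatabaseAction.ADD and the logger field are assumptions; getViolations and getRownr are the calls used above.

  // Hypothetical wrapper (assumed names): log annotated validation errors, then rethrow.
  public EntityImportReport importOrReport(RepositoryCollection repositories) throws IOException {
    try {
      return doImport(repositories, DatabaseAction.ADD); // ADD is an assumed action value
    } catch (MolgenisValidationException e) {
      for (ConstraintViolation violation : e.getViolations()) {
        logger.error("import failed at row " + violation.getRownr());
      }
      throw e;
    }
  }
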
Example #7
  @Override
  public Map<String, DefaultEntityMetaData> getEntityMetaData(RepositoryCollection source) {
    // TODO: this task is actually a 'merge' instead of 'import'
    // so we need to consider both new metadata as existing ...

    Map<String, DefaultEntityMetaData> entities =
        new LinkedHashMap<String, DefaultEntityMetaData>();

    // load attributes first (because entities are optional).
    // the row counter starts at 1 so that reported line numbers account for the header row
    int i = 1;
    for (Entity a : source.getRepositoryByEntityName("attributes")) {
      i++;
      String entityName = a.getString("entity");

      // required
      if (entityName == null) throw new IllegalArgumentException("attributes.entity is missing");
      if (a.get("name") == null) throw new IllegalArgumentException("attributes.name is missing");

      // create entity if not yet defined
      if (entities.get(entityName) == null)
        entities.put(entityName, new DefaultEntityMetaData(entityName));
      DefaultEntityMetaData md = entities.get(entityName);

      DefaultAttributeMetaData am = new DefaultAttributeMetaData(a.getString("name"));

      if (a.get("dataType") != null) {
        FieldType t = MolgenisFieldTypes.getType(a.getString("dataType"));
        if (t == null)
          throw new IllegalArgumentException(
              "attributes.type error on line " + i + ": " + a.getString("dataType") + " unknown");
        am.setDataType(t);
      }
      if (a.get("nillable") != null) am.setNillable(a.getBoolean("nillable"));
      if (a.get("auto") != null) am.setAuto(a.getBoolean("auto"));
      if (a.get("idAttribute") != null) am.setIdAttribute(a.getBoolean("idAttribute"));

      md.addAttributeMetaData(am);
    }

    // load all entities (optional)
    if (source.getRepositoryByEntityName("entities") != null) {
      i = 1; // reset the line counter for the entities sheet
      for (Entity e : source.getRepositoryByEntityName("entities")) {
        i++;
        String entityName = e.getString("name");

        // required
        if (entityName == null)
          throw new IllegalArgumentException("entity.name is missing on line " + i);

        if (entities.get(entityName) == null)
          entities.put(entityName, new DefaultEntityMetaData(entityName));
        DefaultEntityMetaData md = entities.get(entityName);

        if (e.get("description") != null) md.setDescription(e.getString("description"));
      }
    }

    // re-iterate to resolve xref/mref refEntity references (or fail if the target is unknown)
    // TODO: consider also entities already in the existing db
    i = 1; // reset the line counter for the second pass over the attributes sheet
    for (Entity a : source.getRepositoryByEntityName("attributes")) {
      i++;
      if (a.get("refEntity") != null) {
        DefaultEntityMetaData em = entities.get(a.getString("entity"));
        DefaultAttributeMetaData am =
            (DefaultAttributeMetaData) em.getAttribute(a.getString("name"));

        if (entities.get(a.getString("refEntity")) == null) {
          throw new IllegalArgumentException(
              "attributes.refEntity error on line "
                  + i
                  + ": "
                  + a.getString("refEntity")
                  + " unknown");
        }

        am.setRefEntity(entities.get(a.getString("refEntity")));
      }
    }

    return entities;
  }
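
A minimal usage sketch, not part of the original example, that dumps the metadata parsed from the "attributes" and "entities" sheets. It relies only on methods already used in these examples (getAttributes, getName, isNillable).

  // Hypothetical helper (assumed name): print each parsed entity with its attributes.
  public void printParsedMetaData(RepositoryCollection source) {
    for (Map.Entry<String, DefaultEntityMetaData> entry : getEntityMetaData(source).entrySet()) {
      StringBuilder sb = new StringBuilder(entry.getKey()).append(":");
      for (AttributeMetaData att : entry.getValue().getAttributes()) {
        sb.append(' ').append(att.getName());
        if (!att.isNillable()) sb.append(" (required)");
      }
      System.out.println(sb); // println keeps the sketch free of extra dependencies
    }
  }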