public void initialize(IInstance i) throws ThinklabException {

	Properties p = new Properties();
	p.putAll(Geospace.get().getProperties());

	IValue server = i.get("geospace:hasServiceUrl");
	if (server != null)
		p.put(WCSCoverage.WCS_SERVICE_PROPERTY, server.toString());

	IValue format = i.get("geospace:hasImageFormat");
	if (format != null)
		p.put(WCSCoverage.WCS_FORMAT_PROPERTY, format.toString());

	/*
	 * accumulate any nodata values into a single comma-separated property
	 */
	for (IRelationship r : i.getRelationships("geospace:hasNodataValue")) {
		IValue nodata = r.getValue();
		if (nodata != null) {
			String s = p.getProperty(AbstractRasterCoverage.NODATA_PROPERTY, "");
			if (s.length() > 0)
				s += ",";
			s += nodata.toString();
			p.put(AbstractRasterCoverage.NODATA_PROPERTY, s);
		}
	}

	IValue transf = i.get(Geospace.HAS_TRANSFORMATION_EXPRESSION);
	if (transf != null) {
		this.transformation = new MVELExpression(transf.toString());
	}

	String rid = i.get("geospace:hasCoverageId").toString();
	Geospace.get().logger().info("reading WCS source " + server + "#" + rid);
	this.coverage = new WCSCoverage(rid, p);
}
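/*
 * Sketch, not part of the original source: the loop above collects nodata
 * values into one comma-separated property. A consumer of that property could
 * recover the individual values along these lines; parseNodataList is a
 * hypothetical helper name, not part of the Thinklab API.
 */
private static double[] parseNodataList(Properties p) {

	String s = p.getProperty(AbstractRasterCoverage.NODATA_PROPERTY, "");
	if (s.length() == 0)
		return new double[0];

	String[] parts = s.split(",");
	double[] ret = new double[parts.length];
	for (int j = 0; j < parts.length; j++)
		ret[j] = Double.parseDouble(parts[j].trim());
	return ret;
}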
@Override
public void initialize(IInstance i) throws ThinklabException {

	super.initialize(i);

	/*
	 * these should be in already through reflection, but let's keep
	 * the OWL way supported just in case.
	 */
	for (IRelationship r : i.getRelationships("modeltypes:hasClassifier")) {

		String[] rz = r.getValue().toString().split("->");
		Pair<GeneralClassifier, IConcept> cls =
			new Pair<GeneralClassifier, IConcept>(
				new GeneralClassifier(rz[1]),
				KnowledgeManager.get().requireConcept(rz[0]));
		classifiers.add(cls);
	}

	/*
	 * we have no guarantee that the universal classifier, if present,
	 * will be last, given that it may come from an OWL multiproperty where
	 * the ordering isn't guaranteed.
	 *
	 * scan the classifiers and, if we have a universal classifier, make sure
	 * it's the last one, to avoid problems.
	 */
	int unidx = -1;
	int iz = 0;
	for (Pair<GeneralClassifier, IConcept> cls : classifiers) {
		if (cls.getFirst().isUniversal()) {
			unidx = iz;
		}
		iz++;
	}

	if (unidx >= 0 && unidx < classifiers.size() - 1) {
		ArrayList<Pair<GeneralClassifier, IConcept>> nc =
			new ArrayList<Pair<GeneralClassifier, IConcept>>();
		for (iz = 0; iz < classifiers.size(); iz++) {
			if (iz != unidx)
				nc.add(classifiers.get(iz));
		}
		nc.add(classifiers.get(unidx));
		classifiers = nc;
	}

	/*
	 * check if we have a nil classifier; if we don't, we don't bother
	 * classifying nulls and save some work.
	 */
	this.hasNilClassifier = false;
	for (Pair<GeneralClassifier, IConcept> cl : classifiers) {
		if (cl.getFirst().isNil()) {
			this.hasNilClassifier = true;
			break;
		}
	}

	IValue def = i.get(CoreScience.HAS_CONCEPTUAL_SPACE);
	if (def != null)
		cSpace = def.getConcept();

	def = i.get("modeltypes:encodesContinuousDistribution");
	if (def != null)
		continuousDistribution = MiscUtilities.parseDoubleVector(def.toString());

	// TODO remove?
	if (continuousDistribution != null && getDataSource() != null
			&& (getDataSource() instanceof IState))
		((IState) getDataSource()).getMetadata().put(
			Metadata.CONTINUOS_DISTRIBUTION_BREAKPOINTS, continuousDistribution);

	if (continuousDistribution != null)
		metadata.put(Metadata.CONTINUOS_DISTRIBUTION_BREAKPOINTS, continuousDistribution);

	if (classifiers != null) {

		metadata.put(Metadata.CLASSIFIERS, classifiers);

		IConcept[] rnk = null;

		/*
		 * remap the values to ranks and determine how to rewire the input
		 * if necessary, using classifiers instead of lexicographic order to
		 * infer the appropriate concept order
		 */
		ArrayList<GeneralClassifier> cla = new ArrayList<GeneralClassifier>();
		ArrayList<IConcept> con = new ArrayList<IConcept>();
		for (Pair<GeneralClassifier, IConcept> op : classifiers) {
			cla.add(op.getFirst());
			con.add(op.getSecond());
		}

		Pair<double[], IConcept[]> pd =
			Metadata.computeDistributionBreakpoints(cSpace, cla, con);
		if (pd != null) {
			if (pd.getSecond()[0] != null) {
				rnk = pd.getSecond();
			}
		}

		HashMap<IConcept, Integer> ranks = null;
		if (rnk == null) {
			ranks = Metadata.rankConcepts(cSpace, metadata);
		} else {
			ranks = Metadata.rankConcepts(cSpace, rnk, metadata);
		}
	}
}
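/*
 * Sketch, not part of the original source: the universal-classifier
 * reordering performed above, isolated as a helper so the intent is easier to
 * read and test. It moves the universal (catch-all) classifier, if any, to the
 * end of the list so it is evaluated last. The method name is illustrative
 * only; it does not exist in the original class.
 */
private static ArrayList<Pair<GeneralClassifier, IConcept>> moveUniversalLast(
		ArrayList<Pair<GeneralClassifier, IConcept>> list) {

	int idx = -1;
	for (int k = 0; k < list.size(); k++) {
		if (list.get(k).getFirst().isUniversal())
			idx = k;
	}

	// nothing to do if there is no universal classifier or it is already last
	if (idx < 0 || idx == list.size() - 1)
		return list;

	ArrayList<Pair<GeneralClassifier, IConcept>> out =
		new ArrayList<Pair<GeneralClassifier, IConcept>>(list);
	out.add(out.remove(idx));
	return out;
}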