/**
 * Builds a new DOM element for this node in the given document, applying any
 * configured attributes and recursively creating child nodes from contents.
 *
 * Contents may be String (set as text content), XmlNode, or a Collection of
 * XmlNode. NOTE(review): unlike define(), this method does not accept Polylist
 * children — confirm whether that asymmetry is intentional.
 *
 * @param parent not referenced here; kept for interface compatibility
 * @param doc    the owning document, used as the element/attribute factory
 * @return the newly created element (the caller is expected to attach it)
 * @throws ThinklabException if a collection contains anything but XmlNode
 */
Node create(Node parent, Document doc) throws ThinklabException {

	Element element = doc.createElement(tag);

	// attach attributes, if any were defined
	if (attrs != null) {
		for (Pair<String, String> attribute : attrs) {
			Attr attrNode = doc.createAttribute(attribute.getFirst());
			attrNode.setValue(attribute.getSecond());
			element.setAttributeNode(attrNode);
		}
	}

	// contents are dispatched on runtime type: text, collections, nested nodes
	for (Object content : contents) {

		if (content instanceof String) {
			XMLDocument.setTextContent(doc, element, (String) content);
		} else if (content instanceof Collection<?>) {
			for (Object item : (Collection<?>) content) {
				if (!(item instanceof XmlNode)) {
					throw new ThinklabValidationException("XML.node: collections must be of XmlNode");
				}
				element.appendChild(((XmlNode) item).create(element, doc));
			}
		} else if (content instanceof XmlNode) {
			element.appendChild(((XmlNode) content).create(element, doc));
		}
	}

	return element;
}
/**
 * Populates an already-created DOM node with this node's attributes and
 * contents.
 *
 * Contents may be String (set as text content), XmlNode, Polylist (converted
 * through createXmlNode()), or a Collection of XmlNode/Polylist elements.
 *
 * @param self the existing DOM node to fill in; must be an Element when
 *             attributes are present (it is cast to set them)
 * @param doc  the owning document, used as the element/attribute factory
 * @throws ThinklabException if a collection element is neither XmlNode nor Polylist
 */
void define(Node self, Document doc) throws ThinklabException {

	// set attributes first, if any were defined
	if (attrs != null) {
		for (Pair<String, String> attribute : attrs) {
			Attr attrNode = doc.createAttribute(attribute.getFirst());
			attrNode.setValue(attribute.getSecond());
			((Element) self).setAttributeNode(attrNode);
		}
	}

	// contents are dispatched on runtime type
	for (Object content : contents) {

		if (content instanceof String) {
			XMLDocument.setTextContent(doc, self, (String) content);
		} else if (content instanceof Collection<?>) {
			for (Object item : (Collection<?>) content) {
				if (item instanceof XmlNode) {
					self.appendChild(((XmlNode) item).create(self, doc));
				} else if (item instanceof Polylist) {
					self.appendChild(((Polylist) item).createXmlNode().create(self, doc));
				} else {
					throw new ThinklabValidationException(
							"XML.node: collections must be of XmlNode or Polylist");
				}
			}
		} else if (content instanceof XmlNode) {
			self.appendChild(((XmlNode) content).create(self, doc));
		} else if (content instanceof Polylist) {
			self.appendChild(((Polylist) content).createXmlNode().create(self, doc));
		}
	}
}
/**
 * Exports the current storyline as a KMZ archive and offers it to the
 * browser as a file download.
 *
 * @throws ThinklabException   if resource allocation or export fails
 * @throws ThinklabIOException if the exported file cannot be found for download
 */
public void exportKML() throws ThinklabException {

	// first = server-side path to write to, second = client-visible URL
	Pair<String, String> resource = application.getNewResourceUrl(".kmz", session);

	userModel.getStoryline().export(resource.getFirst());

	try {
		Filedownload.save(resource.getSecond(), "application/vnd.google-earth.kmz");
	} catch (FileNotFoundException e) {
		// wrap so callers only need to handle Thinklab exceptions
		throw new ThinklabIOException(e);
	}
}
public Object getValue(int idx, Object[] registers) { Object o = super.getValue(idx, registers); if (o instanceof Number && Double.isNaN(((Number) o).doubleValue())) o = null; if (o == null && !hasNilClassifier) return null; for (Pair<GeneralClassifier, IConcept> p : classifiers) { if (p.getFirst().classify(o)) { /* * create distribution, set 100% evidence for classified concept. */ return p.getSecond(); } } // null means "no data"; it can be caught using with a nil classifier return null; }
/**
 * Initializes this object from its OWL instance: loads classifiers from the
 * "modeltypes:hasClassifier" relationships, normalizes their order (universal
 * classifier last), records whether a nil classifier exists, reads the
 * conceptual space and optional continuous-distribution breakpoints, and
 * publishes classifier/distribution info into the metadata.
 *
 * @param i the OWL instance this object is being built from
 * @throws ThinklabException if a referenced concept cannot be resolved
 */
@Override
public void initialize(IInstance i) throws ThinklabException {

	super.initialize(i);

	/*
	 * these should be in already through reflection, but let's keep
	 * the OWL way supported just in case.
	 */
	for (IRelationship r : i.getRelationships("modeltypes:hasClassifier")) {
		// serialized form appears to be "<concept>-><classifier>":
		// rz[0] is resolved as a concept, rz[1] builds the classifier.
		// TODO(review) confirm against the code that writes this string.
		String[] rz = r.getValue().toString().split("->");
		Pair<GeneralClassifier, IConcept> cls = new Pair<GeneralClassifier, IConcept>(
				new GeneralClassifier(rz[1]), KnowledgeManager.get().requireConcept(rz[0]));
		classifiers.add(cls);
	}

	/*
	 * we have no guarantee that the universal classifier, if there,
	 * will be last, given that it may come from an OWL multiproperty where
	 * the ordering isn't guaranteed.
	 *
	 * scan the classifiers and if we have a universal classifier make sure
	 * it's the last one, to avoid problems.
	 */
	int unidx = -1; // index of the (last seen) universal classifier, -1 if none
	int iz = 0;
	for (Pair<GeneralClassifier, IConcept> cls : classifiers) {
		if (cls.getFirst().isUniversal()) {
			unidx = iz;
		}
		iz++;
	}

	// if a universal classifier exists and isn't already last, move it to the end
	if (unidx >= 0 && unidx < classifiers.size() - 1) {
		ArrayList<Pair<GeneralClassifier, IConcept>> nc = new ArrayList<Pair<GeneralClassifier, IConcept>>();
		for (iz = 0; iz < classifiers.size(); iz++) {
			if (iz != unidx)
				nc.add(classifiers.get(iz));
		}
		nc.add(classifiers.get(unidx));
		classifiers = nc;
	}

	/*
	 * check if we have a nil classifier; if we don't we don't bother classifying
	 * nulls and save some work.
	 */
	this.hasNilClassifier = false;
	for (Pair<GeneralClassifier, IConcept> cl : classifiers) {
		if (cl.getFirst().isNil()) {
			this.hasNilClassifier = true;
			break;
		}
	}

	// conceptual space of the classification, if declared on the instance
	IValue def = i.get(CoreScience.HAS_CONCEPTUAL_SPACE);
	if (def != null)
		cSpace = def.getConcept();

	// optional breakpoints for a continuous distribution encoding
	def = i.get("modeltypes:encodesContinuousDistribution");
	if (def != null)
		continuousDistribution = MiscUtilities.parseDoubleVector(def.toString());

	// TODO remove?
	// mirror the breakpoints into the datasource's metadata when it is a state
	if (continuousDistribution != null && getDataSource() != null && (getDataSource() instanceof IState))
		((IState) getDataSource())
				.getMetadata()
				.put(Metadata.CONTINUOS_DISTRIBUTION_BREAKPOINTS, continuousDistribution);

	if (continuousDistribution != null)
		metadata.put(Metadata.CONTINUOS_DISTRIBUTION_BREAKPOINTS, continuousDistribution);

	if (classifiers != null) {

		metadata.put(Metadata.CLASSIFIERS, classifiers);

		IConcept[] rnk = null;

		/*
		 * remap the values to ranks and determine how to rewire the input
		 * if necessary, use classifiers instead of lexicographic order to
		 * infer the appropriate concept order
		 */
		ArrayList<GeneralClassifier> cla = new ArrayList<GeneralClassifier>();
		ArrayList<IConcept> con = new ArrayList<IConcept>();
		for (Pair<GeneralClassifier, IConcept> op : classifiers) {
			cla.add(op.getFirst());
			con.add(op.getSecond());
		}

		// a non-null first concept in the result means the classifiers define
		// an explicit concept order to use instead of the lexicographic one
		Pair<double[], IConcept[]> pd = Metadata.computeDistributionBreakpoints(cSpace, cla, con);
		if (pd != null) {
			if (pd.getSecond()[0] != null) {
				rnk = pd.getSecond();
			}
		}

		HashMap<IConcept, Integer> ranks = null;
		if (rnk == null) {
			ranks = Metadata.rankConcepts(cSpace, metadata);
		} else {
			ranks = Metadata.rankConcepts(cSpace, rnk, metadata);
		}
		// NOTE(review): 'ranks' is never read after this point — presumably
		// rankConcepts() records the ranking into 'metadata' as a side effect;
		// confirm, otherwise the local can be dropped.
	}
}