public void readBuildRun() throws ContentError, ParseError, ParseException, BuildException,
        XMLException, ConnectionError, RuntimeError {
    File simFile = new File(modelName);
    if (!simFile.exists()) {
        E.error("No such file: " + simFile.getAbsolutePath());
        System.exit(1);
    }
    FileInclusionReader fir = new FileInclusionReader(simFile);
    if (typePath != null) {
        fir.addSearchPaths(typePath);
    }
    Sim sim = new Sim(fir.read());
    sim.readModel();
    sim.build();
    boolean doRun = true;
    if (doRun) {
        sim.run();
        E.info("Finished reading, building, running & displaying LEMS model");
    }
}
public void checkEquations(HashMap<String, Dimensional> cdimHM) throws ContentError {
    // Build a map from variable name to dimensionality, starting from the caller-supplied map
    HashMap<String, Dimensional> dimHM = new HashMap<String, Dimensional>();
    dimHM.putAll(cdimHM);
    for (RequiredVar rv : p_requiredVars) {
        dimHM.put(rv.getName(), rv.getDimensionality());
    }
    for (StateVariable sv : stateVariables) {
        dimHM.put(sv.getName(), sv.getDimensionality());
    }
    for (FinalParam fp : r_type.getFinalParams()) {
        dimHM.put(fp.getName(), fp.getDimensionality());
    }
    for (InstanceProperty ip : r_type.getInstancePropertys()) {
        dimHM.put(ip.getName(), ip.getDimensionality());
    }
    // Time always carries the time dimension
    ExprDimensional tdim = new ExprDimensional();
    tdim.setT(1);
    dimHM.put("t", tdim);
    for (DerivedVariable dv : derivedVariables) {
        try {
            dimHM.put(dv.getName(), dv.getDimensionality(dimHM));
        } catch (ContentError ce) {
            E.error("Checking " + dv + " in " + r_type + " " + ce.getMessage());
        }
    }
    for (ConditionalDerivedVariable cdv : conditionalDerivedVariables) {
        try {
            dimHM.put(cdv.getName(), cdv.getDimensionality(dimHM));
        } catch (ContentError ce) {
            E.error("Checking " + cdv + " in " + r_type + " " + ce.getMessage());
        }
    }
    // Check the dynamics for dimensional consistency against the assembled map
    for (TimeDerivative td : timeDerivatives) {
        td.checkDimensions(dimHM);
    }
    for (PointResponse pr : onStarts) {
        pr.checkEquations(dimHM);
    }
    for (PointResponse pr : onEvents) {
        pr.checkEquations(dimHM);
    }
    for (OnCondition oc : onConditions) {
        oc.checkEquations(dimHM);
        oc.checkConditionDimensions(dimHM);
    }
}
public MatlabWriter(Lems lems) throws ModelFeatureSupportException, LEMSException, NeuroMLException {
    super(lems, "MATLAB");
    MinimalMessageHandler.setVeryMinimal(true);
    E.setDebug(false);
    sli.checkAllFeaturesSupported(FORMAT, lems);
}
private void runDiscreteUpdateComponent(File f1) throws ContentError, IOException, ParseError,
        ConnectionError, RuntimeError {
    String stxt = FileUtil.readStringFromFile(f1);
    XMLElementReader exmlr = new XMLElementReader(stxt + " ");
    XMLElement xel = exmlr.getRootElement();
    LemsLiteFactory lf = new LemsLiteFactory();
    LemsLite lemsLite = lf.buildLemsFromXMLElement(xel);
    E.info("lemsLite model read: " + lemsLite.getSummary());

    // XMLSerializer xs = new XMLSerializer();
    // String sx = xs.serialize(lemsLite);

    File fdir = f1.getParentFile();
    LemsLiteSimulation lls = new LemsLiteSimulation(lemsLite);
    FileDataSource fds = new FileDataSource(fdir);
    lls.run(fds);
}
public ParseTree parseCondition(String e) throws ParseError {
    ParseTree ret = null;
    ParseTree ev = parse(e);
    if (ev.isBoolean()) {
        ret = ev;
    } else {
        E.error("not a condition: " + e);
    }
    return ret;
}
public ParseTree parseExpression(String e) throws ParseError {
    ParseTree ret = null;
    ParseTree ev = parse(e);
    if (ev.isFloat()) {
        ret = ev;
    } else {
        E.error("not an expression: " + e);
    }
    return ret;
}
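// Illustrative usage sketch (not part of the original source): how parseExpression and
// parseCondition above might be called. It assumes the enclosing Parser class has a
// no-argument constructor and that the textual comparison operators (e.g. ".gt.") are
// registered in opHM; both are assumptions, and exception handling is omitted.
Parser parser = new Parser();
ParseTree rateTree = parser.parseExpression("a * exp(v / b)");    // must evaluate to a float
ParseTree spikeTree = parser.parseCondition("v .gt. vthreshold"); // must evaluate to a boolean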
private void realResolve(Lems lems, Parser parser) throws ContentError, ParseError {
    if (lems.looseResolving()) {
        // then we expose anything with a name that matches the name of an exposure
        HashSet<String> expHS = new HashSet<String>();
        for (Exposure exp : getComponentType().getExposures()) {
            expHS.add(exp.getName());
        }
        for (StateVariable sv : stateVariables) {
            if (sv.exposure == null && expHS.contains(sv.name)) {
                sv.exposure = sv.name;
                E.shortWarning("Implicitly exposing a state variable because its name matches an exposure: " + sv);
            }
        }
        for (DerivedVariable dv : derivedVariables) {
            if (dv.exposure == null && expHS.contains(dv.name)) {
                dv.exposure = dv.name;
                E.shortWarning("Implicitly exposing a derived variable because its name matches an exposure: " + dv);
            }
        }
        for (ConditionalDerivedVariable dv : conditionalDerivedVariables) {
            if (dv.exposure == null && expHS.contains(dv.name)) {
                dv.exposure = dv.name;
                E.shortWarning("Implicitly exposing a derived variable because its name matches an exposure: " + dv);
            }
        }
    }

    HashMap<String, Integer> exposedHM = new HashMap<String, Integer>();
    for (Requirement req : getComponentType().getRequirements()) {
        p_requiredVars.add(new RequiredVar(req.getName(), req.getDimension()));
    }
    for (Exposure exp : getComponentType().getExposures()) {
        p_exposedVars.add(new ExposedVar(exp.getName(), exp.getDimension()));
        exposedHM.put(exp.getName(), 0);
    }

    checkMakeValHM();
    addToMap(lems.getGlobals(), valHM);
    addToMap(lems.getConstantValueds(), valHM);

    for (DerivedVariable dvar : derivedVariables) {
        dvar.resolve(lems, lems.getDimensions(), r_type, valHM, parser);
        if (dvar.hasExposure()) {
            countExposure(dvar.getExposure(), exposedHM);
        }
    }
    for (ConditionalDerivedVariable dvar : conditionalDerivedVariables) {
        dvar.resolve(lems, lems.getDimensions(), r_type, valHM, parser);
        if (dvar.hasExposure()) {
            countExposure(dvar.getExposure(), exposedHM);
        }
    }
    for (StateVariable sv : stateVariables) {
        sv.resolve(r_type, lems.getDimensions());
        if (sv.hasExposure()) {
            countExposure(sv.getExposure(), exposedHM);
        }
    }
    for (StateScalarField scf : stateScalarFields) {
        scf.resolve(r_type, lems.getDimensions());
    }
    for (DerivedScalarField dcf : derivedScalarFields) {
        dcf.resolve(r_type, lems.getDimensions());
    }
    for (DerivedPunctateField dpf : derivedPunctateFields) {
        dpf.resolve(r_type, lems.getDimensions());
    }
    for (TimeDerivative sd : timeDerivatives) {
        sd.resolve(stateVariables, valHM, parser);
    }
    for (OnStart os : onStarts) {
        os.resolve(this, stateVariables, valHM, parser);
    }
    for (OnEvent oe : onEvents) {
        oe.resolve(this, stateVariables, valHM, parser);
    }
    for (OnCondition oc : onConditions) {
        oc.resolve(this, stateVariables, valHM, parser);
    }
    for (Regime reg : regimes) {
        reg.setBehavior(this);
        reg.resolve(stateVariables, lems, parser, exposedHM);
    }
    for (KineticScheme ks : kineticSchemes) {
        ks.resolve(r_type);
    }

    for (String enm : exposedHM.keySet()) {
        if (exposedHM.get(enm) == 0) {
            E.oneLineWarning("No internal variable is linked to the exposure: " + enm
                    + " for ComponentType " + getComponentType().getName());
        }
    }
}
public ParseTree parse(String ea) throws ParseError {
    String e = ea;
    if (verbose) {
        E.info("Parsing: " + e);
    }
    e = e.trim();
    if (e.lastIndexOf("(") == 0 && e.indexOf(")") == e.length() - 1) {
        e = e.substring(1, e.length() - 1);
        // E.info("Replaced with: " + e);
    }

    ArrayList<Node> nodes = tokenize(e);
    // now we've got a list of tokens, and each is linked to its neighbor on either side
    if (verbose) {
        E.info("tokens: " + nodes);
    }

    // a group node to hold the whole lot, the same as is used for the content of bracketed chunks
    GroupNode groot = new GroupNode(null);
    groot.addAll(nodes);

    // some nodes will get replaced, but the operators remain throughout and will be needed later
    // to claim their operands. Use a list here so we can sort by precedence.
    ArrayList<AbstractOperatorNode> ops = new ArrayList<AbstractOperatorNode>();
    ArrayList<GroupNode> gnodes = new ArrayList<GroupNode>();
    ArrayList<FunctionNode> fnodes = new ArrayList<FunctionNode>();
    for (Node n : nodes) {
        if (n instanceof AbstractOperatorNode) {
            ops.add((AbstractOperatorNode) n);
        }
        if (n instanceof GroupNode) {
            gnodes.add((GroupNode) n);
        }
        if (n instanceof FunctionNode) {
            fnodes.add((FunctionNode) n);
        }
    }

    // Right parentheses have been mapped to group nodes. Left parentheses
    // are still present. Make each group node claim the content back to the corresponding
    // opening parenthesis. By starting at the left and processing groups as
    // we come to them we don't need recursion
    for (GroupNode gn : gnodes) {
        gn.gatherPreceeding();
    }

    if (verbose) {
        E.info("Root group: " + groot.toString());
    }

    AbstractOperatorNode[] aops = ops.toArray(new AbstractOperatorNode[ops.size()]);
    Arrays.sort(aops, new PrecedenceComparator());

    for (FunctionNode fn : fnodes) {
        fn.claim();
    }
    for (AbstractOperatorNode op : aops) {
        op.claim();
    }
    for (GroupNode gn : gnodes) {
        gn.supplantByChild();
    }

    ParseTreeNode root = null;
    if (groot.getChildren().size() == 1) {
        Node fc = groot.getChildren().get(0);
        if (fc instanceof ParseTreeNode) {
            root = (ParseTreeNode) fc;
        } else {
            throw new ParseError("root node is not evaluable " + fc);
        }
    } else {
        StringBuilder sb = new StringBuilder();
        sb.append(" too many children left in container: " + groot.getChildren().size());
        sb.append("\n");
        for (Node n : groot.getChildren()) {
            sb.append("Node: " + n + "\n");
        }
        throw new ParseError(sb.toString());
    }
    ParseTree ret = new ParseTree(root);
    return ret;
}
public ArrayList<Node> tokenize(String e) {
    ArrayList<Node> ret = new ArrayList<Node>();
    String ewk = disambiguate(e);
    for (String op : opHM.keySet()) {
        ewk = replaceAll(ewk, op, " " + op + " ");
    }
    ewk = replaceAll(ewk, "(", " ( ");
    ewk = replaceAll(ewk, ")", " ) ");
    ewk = reambiguate(ewk);
    if (verbose) {
        E.info("after spaces " + ewk);
    }

    Node pretok = null;
    // StringTokenizer st = new StringTokenizer(ewk, " ");
    // while (st.hasMoreTokens()) {
    //     String stok = st.nextToken();

    // avoiding tokenizer - not part of GWT
    String[] bits = ewk.split(" ");
    for (int i = 0; i < bits.length; i++) {
        String stok = bits[i];
        stok = stok.trim();
        if (stok.length() > 0) {
            Node n = null;
            if (stok.equals(")")) {
                n = new GroupNode();
            } else if (stok.equals("(")) {
                n = new OpenNode();
            } else if (funcHS.contains(stok)) {
                n = new FunctionNode(stok);
            } else if (opHM.containsKey(stok)) {
                n = opHM.get(stok).copy();
                if (n instanceof MinusNode && pretok instanceof AbstractOperatorNode) {
                    n = new UnaryMinusNode();
                }
            } else if (snum.indexOf(stok.substring(0, 1)) >= 0) {
                n = new ConstantNode(stok);
            } else {
                n = new VariableNode(stok);
            }
            if (pretok != null) {
                pretok.linkNext(n);
            }
            pretok = n;
            ret.add(n);
        }
    }
    return ret;
}
public String getMainScript() throws GenerationException, IOException {
    StringBuilder mainRunScript = new StringBuilder();
    addComment(mainRunScript, format + " simulator compliant export for:\n\n"
            + lems.textSummary(false, false));
    try {
        List<File> files = dlemsw.convert();
        for (File file : files) {
            E.info("Writing " + format + " files to: " + this.getOutputFolder());
            if (file.getName().equals(mainDlemsFile)) {
                // Main dLEMS file: fill the run-script template into the main script
                VelocityUtils.initializeVelocity();
                VelocityContext context = new VelocityContext();
                VelocityEngine ve = VelocityUtils.getVelocityEngine();
                String dlems = FileUtil.readStringFromFile(file);
                DLemsWriter.putIntoVelocityContext(dlems, context);
                addComment(mainRunScript,
                        "Using the following distilled version of the LEMS model description for the script below:\n\n" + dlems);
                StringWriter sw1 = new StringWriter();
                boolean generationStatus = ve.evaluate(context, sw1, "LOG",
                        VelocityUtils.getTemplateAsReader(VelocityUtils.vertexRunTemplateFile));
                mainRunScript.append(sw1);
            } else if (file.getName().indexOf(".synapse.") > 0) {
                // Synapse component: write a separate SynapseModel_*.m file
                StringBuilder synapseScript = new StringBuilder();
                addComment(synapseScript, format + " simulator compliant export for:\n\n"
                        + lems.textSummary(false, false));
                E.info(" ==== Handling DLEMS file: " + file.getAbsolutePath());
                VelocityUtils.initializeVelocity();
                VelocityContext context = new VelocityContext();
                VelocityEngine ve = VelocityUtils.getVelocityEngine();
                String dlems = FileUtil.readStringFromFile(file);
                DLemsWriter.putIntoVelocityContext(dlems, context);
                StringWriter sw2 = new StringWriter();
                boolean generationStatus2 = ve.evaluate(context, sw2, "LOG",
                        VelocityUtils.getTemplateAsReader(VelocityUtils.vertexSynapseTemplateFile));
                addComment(synapseScript,
                        "Using the following distilled version of the LEMS model description for the script below:\n\n" + dlems);
                synapseScript.append(sw2);
                String name = (String) context.internalGet(DLemsKeywords.NAME.get());
                E.info("Name: " + name);
                File synapseScriptFile = new File(this.getOutputFolder(),
                        "SynapseModel_" + name.toLowerCase() + ".m");
                FileUtil.writeStringToFile(synapseScript.toString(), synapseScriptFile);
                outputFiles.add(synapseScriptFile);
            } else {
                // Cell component: write a separate PointNeuronModel_*.m file
                StringBuilder cellScript = new StringBuilder();
                addComment(cellScript, format + " simulator compliant export for:\n\n"
                        + lems.textSummary(false, false));
                E.info(" ==== Handling DLEMS file: " + file.getAbsolutePath());
                VelocityUtils.initializeVelocity();
                VelocityContext context = new VelocityContext();
                VelocityEngine ve = VelocityUtils.getVelocityEngine();
                String dlems = FileUtil.readStringFromFile(file);
                DLemsWriter.putIntoVelocityContext(dlems, context);
                StringWriter sw2 = new StringWriter();
                boolean generationStatus2 = ve.evaluate(context, sw2, "LOG",
                        VelocityUtils.getTemplateAsReader(VelocityUtils.vertexCellTemplateFile));
                addComment(cellScript,
                        "Using the following distilled version of the LEMS model description for the script below:\n\n" + dlems);
                cellScript.append(sw2);
                String name = (String) context.internalGet(DLemsKeywords.NAME.get());
                E.info("Name: " + name);
                File cellScriptFile = new File(this.getOutputFolder(),
                        "PointNeuronModel_" + name.toLowerCase() + ".m");
                FileUtil.writeStringToFile(cellScript.toString(), cellScriptFile);
                outputFiles.add(cellScriptFile);
            }
        }
    } catch (IOException e1) {
        throw new GenerationException("Problem converting LEMS to dLEMS", e1);
    } catch (VelocityException e) {
        throw new GenerationException("Problem using Velocity template", e);
    }
    return mainRunScript.toString();
}
private void initializeWriter() {
    MinimalMessageHandler.setVeryMinimal(true);
    E.setDebug(false);
}
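// Illustrative usage sketch (not part of the original source): driving the MATLAB export
// with the constructor and getMainScript() shown above. It assumes a resolved Lems
// instance named lems is already available; the output file name is arbitrary, and
// exception handling is omitted.
MatlabWriter matlabWriter = new MatlabWriter(lems);
String mainScript = matlabWriter.getMainScript();
// Persist the generated MATLAB run script using the same FileUtil helper used above
FileUtil.writeStringToFile(mainScript, new File("RunModel.m"));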