public void load() throws IOException {
    File file = getFile();
    if (file.exists()) {
      JsonObject rootObject;
      // Parse the existing file, making sure the reader is closed afterwards
      try (FileReader reader = new FileReader(file)) {
        rootObject = JSON_PARSER.parse(reader).getAsJsonObject();
      }

      for (JsonElement element : rootObject.getAsJsonArray("players")) {
        Player player = new Player(element.getAsJsonObject());
        playerDB.put(player.getName(), player);
      }

      for (JsonElement element : rootObject.getAsJsonArray("groups")) {
        Group group = new Group(element.getAsJsonObject());
        groupDB.put(group.getName(), group);
      }
    } else {
      //noinspection ResultOfMethodCallIgnored
      file.createNewFile();
      JsonObject rootObject = new JsonObject();
      rootObject.add("players", new JsonArray());
      rootObject.add("groups", new JsonArray());
      try (BufferedWriter bw = new BufferedWriter(new FileWriter(file))) {
        bw.write(GSON.toJson(rootObject));
      }
    }
  }
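Both load() above and save() further down assume that Player and Group expose a JsonObject constructor and a toJson() method. A minimal sketch of what that round-trip contract might look like for Player, assuming Gson and a single "name" field (the field name is illustrative, not taken from the original project):

  import com.google.gson.JsonObject;

  // Hypothetical sketch of the JSON round-trip contract load()/save() rely on.
  public class Player {
    private final String name;

    // Rebuild a Player from one element of the "players" array.
    public Player(JsonObject json) {
      this.name = json.get("name").getAsString(); // assumed field name
    }

    public String getName() {
      return name;
    }

    // Serialize back into the shape load() expects to read.
    public JsonObject toJson() {
      JsonObject json = new JsonObject();
      json.addProperty("name", name);
      return json;
    }
  }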
Example #2
  public void parseGroup(Group g) throws Hdf5Exception, EndOfSequenceException {
    startGroup(g);

    java.util.List members = g.getMemberList();

    // NOTE: parse the contents twice so that subgroups are handled before datasets.
    // This is mainly because synapse_props groups need to be parsed before the datasets of
    // connections.

    for (int j = 0; j < members.size(); j++) {
      HObject obj = (HObject) members.get(j);

      if (obj instanceof Group) {
        Group subGroup = (Group) obj;

        logger.logComment("---------    Found a sub group: " + subGroup.getName());

        parseGroup(subGroup);
      }
    }

    for (int j = 0; j < members.size(); j++) {
      HObject obj = (HObject) members.get(j);

      if (obj instanceof Dataset) {
        Dataset ds = (Dataset) obj;

        logger.logComment("Found a dataset: " + ds.getName());

        dataSet(ds);
      }
    }

    endGroup(g);
  }
 protected void addPropertiesWithPrefix(Properties properties, Group group) {
   if (properties != null && group != null) {
     String prefix = group.getName() + ".";
     Properties suffixProperties = group.getProperties();
     addPropertiesWithPrefix(properties, prefix, suffixProperties);
   }
 }
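The two-argument overload above delegates to a three-argument addPropertiesWithPrefix(properties, prefix, suffixProperties) that is not shown in this example. A minimal sketch of what that helper presumably does, copying each entry under a prefixed key; the body is an assumption, not the original implementation:

  // Hypothetical sketch: copy every entry of suffixProperties into properties,
  // prefixing each key with the group name (e.g. "admins." + "mail" -> "admins.mail").
  protected void addPropertiesWithPrefix(
      Properties properties, String prefix, Properties suffixProperties) {
    if (suffixProperties == null) {
      return;
    }
    for (String key : suffixProperties.stringPropertyNames()) {
      properties.setProperty(prefix + key, suffixProperties.getProperty(key));
    }
  }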
  /*
   * Loads a concept group from an XML element.
   */
  private Group loadGroup(Element element) {
    Group group = new Group();
    group.name = element.getAttributeValue("name");

    List children = element.getChildren("concept");
    for (Iterator it = children.iterator(); it.hasNext(); ) {
      Element e = (Element) it.next();
      Concept concept = new Concept();
      concept.id = e.getAttributeValue("id");
      concept.rubric = e.getAttributeValue("rubric");
      group.addConcept(concept);
    }
    return group;
  }
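For context, loadGroup expects markup shaped like the string built below. A hypothetical usage sketch, assuming JDOM 1.x (org.jdom.*) since the example uses Element.getAttributeValue and getChildren; the attribute values are invented for illustration:

  // Hypothetical usage: parse a small concept-group document and hand the
  // root <group> element to loadGroup(...) above. Exception handling kept minimal.
  private Group loadSampleGroup() throws Exception {
    String xml =
        "<group name=\"Cardiology\">"
            + "<concept id=\"C01\" rubric=\"Heart disease\"/>"
            + "<concept id=\"C02\" rubric=\"Arrhythmia\"/>"
            + "</group>";

    org.jdom.Document doc =
        new org.jdom.input.SAXBuilder().build(new java.io.StringReader(xml));
    return loadGroup(doc.getRootElement());
  }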
Example #5
 GroupInvite(User sender, User receiver, Group group) {
   this.sender = sender;
   this.receiver = receiver;
   this.group = group;
   this.text =
       "<a href = profile.jsp?ID="
           + sender.getID()
           + ">"
           + sender.getDisplayName()
           + "</a>"
           + " has invited you to join "
           + "<a href profile.jsp?ID="
           + group.getID()
           + ">"
           + group.getDisplayName()
           + "</a>";
   type = "GroupInvite";
 }
  /**
   * Creates a notification to a Group coordinator signaling that a user wants to join their group
   *
   * <p>- Requires a groupId request parameter for the GET
   *
   * @param req The HTTP Request
   * @param res The HTTP Response
   */
  public void inviteAction(HttpServletRequest req, HttpServletResponse res) {
    // Ensure there is a cookie for the session user
    if (AccountController.redirectIfNoCookie(req, res)) return;

    Map<String, Object> viewData = new HashMap<String, Object>();

    try {
      // Parse the target group id; a missing or malformed parameter falls through to the
      // redirect in the catch block below
      int groupId = Integer.parseInt(req.getParameter("groupId"));

      // Get the session user
      HttpSession session = req.getSession();
      Session userSession = (Session) session.getAttribute("userSession");
      User user = userSession.getUser();

      // Get the coordinator for the group
      GroupManager groupMan = new GroupManager();
      Group group = groupMan.get(groupId);
      User coordinator = groupMan.getCoordinator(groupId);

      // Send a notification to the coordinator for them to permit access to the group
      NotificationManager notificationMan = new NotificationManager();
      Notification notification =
          new Notification(
              coordinator.getId(),
              coordinator,
              groupId,
              group,
              user.getFullName() + " wants to join your group " + group.getGroupName(),
              "/home/notifications?addUserId=" + user.getId() + "&groupId=" + group.getId());
      notificationMan.createNotification(notification);

      redirectToLocal(req, res, "/home/dashboard");
      return;

    } catch (Exception e) {
      redirectToLocal(req, res, "/home/dashboard");
    }
  }
Example #7
 /**
  * Converts the contents of the GroupInvite to a {@link java.lang.String} in order to save it to
  * file. The invite is written as: Type:type Sender:senderID Receiver:receiverID Group:groupID
  * Text:text
  */
 public String toString() {
   return ("Type:"
       + type
       + "\nSender:"
       + sender.getID()
       + "\nReceiver:"
       + receiver.getID()
       + "\nGroup:"
       + group.getID()
       + "\nText:"
       + text
       + "\n"); // Just for compile
 }
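Nothing in this example shows how the saved text is read back. A hypothetical counterpart that recovers the fields from the Type/Sender/Receiver/Group/Text lines produced above might look like the sketch below; the method name and map-based result are my own, and it assumes the text field contains no newlines:

  // Hypothetical sketch: parse the "Key:value" lines written by toString() back into a map.
  static java.util.Map<String, String> parseInvite(String saved) {
    java.util.Map<String, String> fields = new java.util.HashMap<String, String>();
    for (String line : saved.split("\n")) {
      int sep = line.indexOf(':');
      if (sep > 0) {
        fields.put(line.substring(0, sep), line.substring(sep + 1));
      }
    }
    return fields; // e.g. fields.get("Sender") -> senderID
  }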
Example #8
  /** @param data CSV content with one "name,events" record per row */
  public static String Parsecsv(String data) {

    try {
      StringReader str = new StringReader(data);
      CSVReader reader = new CSVReader(str);
      java.util.List<String[]> content = reader.readAll();
      HashMap<String, Group> groups = new HashMap<String, Group>();
      for (String[] row : content) {
        String name = row[0];
        String events = row[1];

        if (groups.containsKey(name)) {
          groups.get(name).Add(Double.valueOf(events));
        } else {
          Group g = new Group();
          g.Add(Double.valueOf(events));
          groups.put(name, g);
        }
      }
      reader.close();

      StringBuilder result = new StringBuilder();
      for (String name : groups.keySet()) {
        Group g = groups.get(name);
        result.append(
            String.format(
                "%S has attended %S events with average of %S %n",
                name, g.Gettotal(), g.GetAverage()));
      }

      return result.toString();

    } catch (Exception e) {
      return e.toString();
    }
  }
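A quick usage sketch for Parsecsv, assuming Group keeps a running total and average of the event counts added to it; the sample rows are made up:

  // Hypothetical usage: two rows for Alice, one for Bob.
  String csv = "Alice,3\nAlice,5\nBob,2\n";
  String report = Parsecsv(csv);
  System.out.println(report);
  // Expected shape, per the format string above:
  //   ALICE has attended <total> events with average of <average>
  //   BOB has attended <total> events with average of <average>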
  public void save() {
    try {
      File file = getFile();
      if (!file.exists()) {
        //noinspection ResultOfMethodCallIgnored
        file.createNewFile();
      }
      JsonObject rootObject = new JsonObject();

      JsonArray players = new JsonArray();
      for (Player player : playerDB.values()) players.add(player.toJson());
      rootObject.add("players", players);

      JsonArray groups = new JsonArray();
      for (Group group : groupDB.values()) groups.add(group.toJson());
      rootObject.add("groups", groups);

      try (BufferedWriter bw = new BufferedWriter(new FileWriter(file))) {
        bw.write(GSON.toJson(rootObject));
      }
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
Example #10
 private void update() {
   try {
     group.subscribeTo(receiver);
     receiver.addGroup(group);
     Date date = new Date();
     receiver.post(
         "<a href = profile.jsp?ID="
             + receiver.getID()
             + ">"
             + receiver.getDisplayName()
             + "</a>"
             + " joined "
             + "<a href = profile.jsp?ID="
             + group.getID()
             + ">"
             + group.getDisplayName()
             + "</a>",
         receiver,
         date);
    } catch (Exception e) { // TODO: surface this failure to the caller instead of swallowing it
     e.printStackTrace();
   }
 }
Example #11
  public void endGroup(Group g) throws Hdf5Exception {
    logger.logComment("-----   Going out of a group: " + g.getFullName());

    if (g.getName().equals(NetworkMLConstants.POPULATIONS_ELEMENT)) {
      inPopulations = false;
    } else if (g.getName().equals(NetworkMLConstants.PROJECTIONS_ELEMENT)) {
      inProjections = false;
    } else if (g.getName().equals(NetworkMLConstants.INPUTS_ELEMENT)) {
      inInputs = false;
    } else if (g.getName().equals(NetworkMLConstants.INPUT_ELEMENT) && inInputs) {
      currentInput = null;
    } else if (g.getName().startsWith(NetworkMLConstants.POPULATION_ELEMENT) && inPopulations) {
      currentCellGroup = null;
    } else if (g.getName().startsWith(NetworkMLConstants.PROJECTION_ELEMENT) && inProjections) {
      currentNetConn = null;
      globConnProps = new ArrayList<ConnSpecificProps>();
    } else if (g.getName().startsWith(NetworkMLConstants.CONNECTION_ELEMENT)) {
      localConnProps = new ArrayList<ConnSpecificProps>();
      localAPDelay = 0;
    }
  }
  /**
   * Drives the application.
   *
   * @param args the first element is the SVN access configuration file
   * @throws Exception if it fails to generate a report
   */
  public void drive(String[] args) throws Exception {

    // We need an access config file in order to generate a report.
    if (args == null || args.length < 1) {
      System.out.println("=== SVN Access Auditor v1.0 ===");
      System.out.println("Outputs mapping information (HTML) between SVN repos and users. ");
      System.out.println("Usage:");
      System.out.println("  java -jar svnaccessauditor.jar <svnaccess.conf>");
      System.out.println();
      System.out.println("Output: repos.html");
      return;
    }

    File accessConfigFile = new File(args[0]);
    if (!accessConfigFile.exists() || accessConfigFile.isDirectory()) {
      System.out.println("File not exists at: " + args[0]);
      return;
    }

    System.out.println("Processing the records...");
    BufferedReader reader = new BufferedReader(new FileReader(accessConfigFile));
    String line;

    // Let the states handle the records.
    currentState = new InitialState(this);
    while ((line = reader.readLine()) != null) {
      currentState.process(line);
    }
    reader.close();

    // Add all users to EVERYONE group.
    for (User u : users.values()) {
      EVERYONE.addUser(u);
    }
    groups.put("EVERYONE", EVERYONE);

    reportGenerator = new HtmlReportGenerator(users, groups, repos);
    reportGenerator.generateReport();
  }
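The loop above delegates each line of svnaccess.conf to currentState.process(line); the concrete state classes are not part of this example. A minimal sketch of the contract the driver appears to rely on, with InitialState taken from the code above and everything else assumed:

  // Hypothetical sketch of the per-line state contract used by drive():
  // each state consumes one config line and may hand the driver a different
  // state when it sees a new section of svnaccess.conf.
  interface AccessConfigState {
    void process(String line);
  }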
  // Read all records in all files and
  // divide them into groups based on GDS hash;
  // each group has an ArrayList of all records that belong to it.
  // For each group, run the rectilyser to derive the coordinates and variables.
  public List<Group> makeAggregatedGroups(
      List<String> filenames, CollectionManager.Force force, Formatter f) throws IOException {
    Map<Integer, Group> gdsMap = new HashMap<Integer, Group>();
    boolean intvMerge = mergeIntvDefault;

    f.format("GribCollection %s: makeAggregatedGroups%n", gc.getName());
    int total = 0;
    int fileno = 0;

    for (CollectionManager dcm : collections) {
      f.format(" dcm= %s%n", dcm);
      FeatureCollectionConfig.GribConfig config =
          (FeatureCollectionConfig.GribConfig)
              dcm.getAuxInfo(FeatureCollectionConfig.AUX_GRIB_CONFIG);
      Map<Integer, Integer> gdsConvert = (config != null) ? config.gdsHash : null;
      FeatureCollectionConfig.GribIntvFilter intvMap = (config != null) ? config.intvFilter : null;
      intvMerge =
          (config == null) || (config.intvMerge == null) ? mergeIntvDefault : config.intvMerge;

      for (MFile mfile : dcm.getFiles()) {
        // f.format("%3d: %s%n", fileno, mfile.getPath());
        filenames.add(mfile.getPath());

        Grib2Index index = null;
        try {
          index =
              (Grib2Index)
                  GribIndex.readOrCreateIndexFromSingleFile(
                      false, !isSingleFile, mfile, config, force, f);

        } catch (IOException ioe) {
          logger.warn(
              "GribCollectionBuilder {}: reading/Creating gbx9 index failed err={}",
              gc.getName(),
              ioe.getMessage());
          f.format(
              "GribCollectionBuilder: reading/Creating gbx9 index failed err=%s%n  skipping %s%n",
              ioe.getMessage(), mfile.getPath() + GribIndex.IDX_EXT);
          continue;
        }

        for (Grib2Record gr : index.getRecords()) {
          if (this.tables == null) {
            Grib2SectionIdentification ids =
                gr.getId(); // so all records must use the same table (!)
            this.tables =
                Grib2Customizer.factory(
                    ids.getCenter_id(),
                    ids.getSubcenter_id(),
                    ids.getMaster_table_version(),
                    ids.getLocal_table_version());
            if (config != null)
              tables.setTimeUnitConverter(
                  config.getTimeUnitConverter()); // LOOK doesn't really work with multiple collections
          }
          if (intvMap != null && filterTinv(gr, intvMap, f)) continue; // skip

          gr.setFile(fileno); // each record tracks which file it belongs to
          int gdsHash =
              gr.getGDSsection().getGDS().hashCode(); // use GDS hash code to group records
          // allow external config to muck with gdsHash (because of errors in encoding);
          // we need exact hash matching
          if (gdsConvert != null && gdsConvert.get(gdsHash) != null) {
            gdsHash = (Integer) gdsConvert.get(gdsHash);
          }

          Group g = gdsMap.get(gdsHash);
          if (g == null) {
            g = new Group(gr.getGDSsection(), gdsHash);
            gdsMap.put(gdsHash, g);
          }
          g.records.add(gr);
          total++;
        }
        fileno++;
      }
    }
    f.format(" total grib records= %d%n", total);

    Grib2Rectilyser.Counter c = new Grib2Rectilyser.Counter(); // debugging
    List<Group> result = new ArrayList<Group>(gdsMap.values());
    for (Group g : result) {
      g.rect = new Grib2Rectilyser(tables, g.records, g.gdsHash, intvMerge);
      f.format(" GDS hash %d == ", g.gdsHash);
      g.rect.make(f, c, filenames);
    }
    f.format(
        " Rectilyser: nvars=%d records unique=%d total=%d dups=%d (%f) %n",
        c.vars, c.recordsUnique, c.records, c.dups, ((float) c.dups) / c.records);

    return result;
  }
  /**
   * Displays a given Research Group page for an HTTP GET, or creates a new Group for an HTTP POST
   *
   * <p>- Requires a cookie for the session user - Requires a groupId request parameter for a GET -
   * Requires groupName, description and createdByUserId request parameters for a POST
   *
   * @param req The HTTP Request
   * @param res The HTTP Response
   */
  public void researchgroupAction(HttpServletRequest req, HttpServletResponse res) {
    // Ensure there is a cookie for the session user
    if (AccountController.redirectIfNoCookie(req, res)) return;

    Map<String, Object> viewData = new HashMap<String, Object>();
    viewData.put("title", "Research Group");

    if (HttpMethod.Get.equals(req.getMethod())) {
      // Load group data into Map
      GroupManager gm = new GroupManager();
      int groupId = Integer.parseInt(req.getParameter("groupId"));
      Group group = gm.get(groupId);

      if (group != null) {
        // Load Group into map
        viewData.put("group", group);

        // Load group members into Map
        List<String> groupMembers = gm.getGroupMembers(groupId);
        viewData.put("groupMembers", groupMembers);

        // Load meetings into map
        MeetingManager meetMan = new MeetingManager();
        List<Meeting> groupMeetings = meetMan.getGroupMeetings(groupId);
        viewData.put("groupMeetings", groupMeetings);

        // Load Document Data into Map
        DocumentManager docMan = new DocumentManager();
        List<Document> groupDocuments = docMan.getGroupDocuments(groupId);
        viewData.put("groupDocuments", groupDocuments);

        // Load discussion threads
        DiscussionManager dm = new DiscussionManager();
        viewData.put("groupDiscussions", dm.getThreads(groupId));

        // Check if the user is a member
        boolean isMember = false;
        HttpSession session = req.getSession();
        Session userSession = (Session) session.getAttribute("userSession");
        User user = userSession.getUser();

        for (Group g : gm.getAllGroups(user.getId())) {
          if (g.getId() == group.getId()) {
            isMember = true;
            break;
          }
        }

        viewData.put("notMember", !isMember);

        // View group page.
        view(req, res, "/views/group/ResearchGroup.jsp", viewData);

      } else {
        httpNotFound(req, res);
      }

    } else if (HttpMethod.Post.equals(req.getMethod())) {
      // Create Group

      // Get data from parameters
      String groupName = req.getParameter("groupName");
      String description = req.getParameter("description");
      int adminId = Integer.parseInt(req.getParameter("createdByUserId"));

      // Create the Group
      GroupManager groupMan = new GroupManager();
      Group group = new Group();
      group.setGroupName(groupName);
      group.setDescription(description);
      group.setCoordinatorId(adminId);
      // Create the mapping
      groupMan.createGroup(group);
      int groupId = groupMan.getIdFor(group);
      groupMan.createMapping(groupId, adminId);

      group.setId(groupId);

      // Update the User Session to show new group
      HttpSession session = req.getSession();
      Session userSession = (Session) session.getAttribute("userSession");
      User admin = userSession.getUser();
      admin.getGroups().add(group);

      // Show the Group Page
      viewData.put("groupName", group.getGroupName());
      List<String> groupMembers = groupMan.getGroupMembers(groupId);
      viewData.put("groupMembers", groupMembers);

      view(req, res, "/views/group/ResearchGroup.jsp", viewData);
    }
  }
  protected Group getJoinedGroup(String userRole, Group group1, Group group2) {
    Group res = null;

    {
      Principal principal = null;
      String id = null;
      String name = null;

      // Set 'principal':
      {
        principal = new SimplePrincipal(userRole);
      }

      // Set 'id':
      {
        id = userRole;
      }

      // Set 'name':
      {
        name = ApplicationUserRoles.getDisplayName(userRole);
      }

      if (group1 == null) {
        if (group2 == null) {
          res = null;
        } else {
          // Set result from 'group2':
          {
            AbstractUserAuthorizor.DefaultGroup g = new AbstractUserAuthorizor.DefaultGroup(group2);

            g.setName(name);
            g.setPrincipal(principal);

            res = g;
          }
        }
      } else {
        if (group2 == null) {
          // Set result from 'group1':
          {
            AbstractUserAuthorizor.DefaultGroup g = new AbstractUserAuthorizor.DefaultGroup(group1);

            g.setName(name);
            g.setPrincipal(principal);

            res = g;
          }
        } else {
          Properties properties = null;
          Map<String, List<String>> attributes = null;
          List<Principal> userPrincipals = null;
          List<Principal> superGroupPrincipals = null;
          List<Principal> groupPrincipals = null;

          // Set 'properties':
          {
            properties = new Properties();

            // Add properties from 'group1':
            {
              addPropertiesWithPrefix(properties, group1);
            }

            // Add properties from 'group2':
            {
              addPropertiesWithPrefix(properties, group2);
            }
          }

          // Set 'attributes':
          {
            Map<String, List<String>> attributes1 = group1.getAttributes();
            Map<String, List<String>> attributes2 = group2.getAttributes();

            attributes = joinAttributes(attributes1, attributes2);
          }

          // Set 'userPrincipals':
          {
            userPrincipals = new ArrayList<Principal>();

            // Add user principals from 'group1':
            {
              List<Principal> l = group1.getUserPrincipals();
              if (l != null) {
                userPrincipals.addAll(l);
              }
            }

            // Add user principals from 'group2':
            {
              List<Principal> l = group2.getUserPrincipals();
              if (l != null) {
                userPrincipals.addAll(l);
              }
            }
          }

          // Set 'superGroupPrincipals':
          {
            superGroupPrincipals = new ArrayList<Principal>();

            // Add super-group principals from 'group1':
            {
              List<Principal> l = group1.getSuperGroupPrincipals();
              if (l != null) {
                superGroupPrincipals.addAll(l);
              }
            }

            // Add super-group principals from 'group2':
            {
              List<Principal> l = group2.getSuperGroupPrincipals();
              if (l != null) {
                superGroupPrincipals.addAll(l);
              }
            }
          }

          // Set 'groupPrincipals':
          {
            groupPrincipals = new ArrayList<Principal>();

            // Add group principals from 'group1':
            {
              List<Principal> l = group1.getGroupPrincipals();
              if (l != null) {
                groupPrincipals.addAll(l);
              }
            }

            // Add group principals from 'group2':
            {
              List<Principal> l = group2.getGroupPrincipals();
              if (l != null) {
                groupPrincipals.addAll(l);
              }
            }
          }

          res =
              new AbstractUserAuthorizor.DefaultGroup(
                  principal,
                  id,
                  name,
                  properties,
                  attributes,
                  userPrincipals,
                  superGroupPrincipals,
                  groupPrincipals);
        }
      }
    }

    return res;
  }
  // Read all records in all files and
  // divide them into groups based on GDS hash;
  // each group has an ArrayList of all records that belong to it.
  // For each group, run the rectilyser to derive the coordinates and variables.
  public List<Group> makeAggregatedGroups(
      ArrayList<String> filenames, CollectionManager.Force force, Formatter f) throws IOException {
    Map<Integer, Group> gdsMap = new HashMap<Integer, Group>();

    f.format("GribCollection %s: makeAggregatedGroups%n", gc.getName());
    int total = 0;
    int fileno = 0;
    for (CollectionManager dcm : collections) {
      // dcm.scanIfNeeded(); // LOOK ??
      f.format(" dcm= %s%n", dcm);
      Map<Integer, Integer> gdsConvert = (Map<Integer, Integer>) dcm.getAuxInfo("gdsHash");

      for (MFile mfile : dcm.getFiles()) {
        // f.format("%3d: %s%n", fileno, mfile.getPath());
        filenames.add(mfile.getPath());

        Grib2Index index = new Grib2Index();
        try {
          if (!index.readIndex(
              mfile.getPath(),
              mfile.getLastModified(),
              force)) { // here's where the index date is checked against the data file
            index.makeIndex(mfile.getPath(), f);
            f.format(
                "  Index written: %s == %d records %n",
                mfile.getName() + Grib2Index.IDX_EXT, index.getRecords().size());
          } else if (debug) {
            f.format(
                "  Index read: %s == %d records %n",
                mfile.getName() + Grib2Index.IDX_EXT, index.getRecords().size());
          }
        } catch (IOException ioe) {
          f.format(
              "GribCollectionBuilder: reading/Creating gbx9 index failed err=%s%n  skipping %s%n",
              ioe.getMessage(), mfile.getPath() + Grib2Index.IDX_EXT);
          continue;
        }

        for (Grib2Record gr : index.getRecords()) {
          gr.setFile(fileno); // each record tracks which file it belongs to
          int gdsHash =
              gr.getGDSsection().getGDS().hashCode(); // use GDS hash code to group records
          // allow external config to muck with gdsHash (because of errors in encoding);
          // we need exact hash matching
          if (gdsConvert != null && gdsConvert.get(gdsHash) != null) {
            gdsHash = (Integer) gdsConvert.get(gdsHash);
          }

          Group g = gdsMap.get(gdsHash);
          if (g == null) {
            g = new Group(gr.getGDSsection(), gdsHash);
            gdsMap.put(gdsHash, g);
          }
          g.records.add(gr);
          total++;
        }
        fileno++;
      }
    }
    f.format(" total grib records= %d%n", total);

    Grib2Rectilyser.Counter c = new Grib2Rectilyser.Counter();
    List<Group> result = new ArrayList<Group>(gdsMap.values());
    for (Group g : result) {
      g.rect = new Grib2Rectilyser(g.records, g.gdsHash);
      f.format(" GDS hash %d == ", g.gdsHash);
      g.rect.make(f, c);
    }
    f.format(
        " Rectilyser: nvars=%d records unique=%d total=%d dups=%d (%f) %n",
        c.vars, c.recordsUnique, c.records, c.dups, ((float) c.dups) / c.records);

    return result;
  }
Example #17
  public void startGroup(Group g) throws Hdf5Exception {
    logger.logComment("-----   Going into a group: " + g.getFullName());

    ArrayList<Attribute> attrs = Hdf5Utils.parseGroupForAttributes(g);

    for (Attribute attribute : attrs) {
      // attribute.
      logger.logComment(
          "Group: "
              + g.getName()
              + " has attribute: "
              + attribute.getName()
              + " = "
              + Hdf5Utils.getFirstStringValAttr(attrs, attribute.getName()));
    }

    if (g.getName().equals(NetworkMLConstants.ROOT_ELEMENT)) {
      logger.logComment("Found the main group");

      String simConfigName =
          Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.NC_SIM_CONFIG);

      if (simConfigName != null) this.foundSimConfig = simConfigName;

      String randomSeed =
          Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.NC_NETWORK_GEN_RAND_SEED);

      if (randomSeed != null) this.foundRandomSeed = Long.parseLong(randomSeed);

    } else if (g.getName().equals(NetworkMLConstants.POPULATIONS_ELEMENT)) {
      logger.logComment("Found the pops group");
      inPopulations = true;

    } else if (g.getName().startsWith(NetworkMLConstants.POPULATION_ELEMENT) && inPopulations) {
      String name = Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.POP_NAME_ATTR);

      logger.logComment("Found a population: " + name);
      currentCellGroup = name;
    } else if (g.getName().equals(NetworkMLConstants.PROJECTIONS_ELEMENT)) {
      logger.logComment("Found the projections group");
      inProjections = true;

      String units = Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.UNITS_ATTR);

      projUnitSystem = UnitConverter.getUnitSystemIndex(units);

    } else if (g.getName().startsWith(NetworkMLConstants.PROJECTION_ELEMENT) && inProjections) {
      String name = Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.PROJ_NAME_ATTR);
      String source = Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.SOURCE_ATTR);
      String target = Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.TARGET_ATTR);

      logger.logComment("Found a projection: " + name + " from " + source + " to " + target);

      if (!project.morphNetworkConnectionsInfo.isValidSimpleNetConn(name)
          && !project.volBasedConnsInfo.isValidVolBasedConn(name)) {
        throw new Hdf5Exception(
            "Error: there is a network connection with name: "
                + name
                + " specified in "
                + "that file, but no such NetConn exists in the project. Add one to allow import of this file");
      }

      /* TODO: Add checks on source & target!!
       */

      if (project.morphNetworkConnectionsInfo.isValidSimpleNetConn(name)) {
        // if (project.morphNetworkConnectionsInfo)
      }

      currentNetConn = name;
    } else if (g.getName().startsWith(NetworkMLConstants.SYN_PROPS_ELEMENT + "_")
        && inProjections) {
      String name = Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.SYN_TYPE_ATTR);

      ConnSpecificProps cp = new ConnSpecificProps(name);

      String internalDelay =
          Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.INTERNAL_DELAY_ATTR);
      if (internalDelay != null)
        cp.internalDelay =
            (float)
                UnitConverter.getTime(
                    Float.parseFloat(internalDelay),
                    projUnitSystem,
                    UnitConverter.NEUROCONSTRUCT_UNITS);

      // Lump them in to the internal delay...
      String preDelay = Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.PRE_DELAY_ATTR);
      if (preDelay != null)
        cp.internalDelay =
            cp.internalDelay
                + (float)
                    UnitConverter.getTime(
                        Float.parseFloat(preDelay),
                        projUnitSystem,
                        UnitConverter.NEUROCONSTRUCT_UNITS);

      String postDelay = Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.POST_DELAY_ATTR);
      if (postDelay != null)
        cp.internalDelay =
            cp.internalDelay
                + (float)
                    UnitConverter.getTime(
                        Float.parseFloat(postDelay),
                        projUnitSystem,
                        UnitConverter.NEUROCONSTRUCT_UNITS);

      cp.weight =
          Float.parseFloat(Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.WEIGHT_ATTR));

      String propDelay = Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.PROP_DELAY_ATTR);
      if (propDelay != null)
        globAPDelay =
            (float)
                UnitConverter.getTime(
                    Float.parseFloat(propDelay),
                    projUnitSystem,
                    UnitConverter.NEUROCONSTRUCT_UNITS);

      logger.logComment("Found: " + cp);

      globConnProps.add(cp);
    } else if (g.getName().equals(NetworkMLConstants.INPUTS_ELEMENT)) {
      logger.logComment("Found the Inputs group");
      inInputs = true;

      String units = Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.UNITS_ATTR);

      inputUnitSystem = UnitConverter.getUnitSystemIndex(units);
    } else if (g.getName().startsWith(NetworkMLConstants.INPUT_ELEMENT) && inInputs) {
      // The table of input sites is within the input group so get sites from here

      String inputName = g.getName().substring(6);

      // String inputName = Hdf5Utils.getFirstStringValAttr(attrs,
      // NetworkMLConstants.INPUT_ELEMENT);

      logger.logComment("Found an Input: " + inputName);
      // inInput = true;

      if (project.elecInputInfo.getStim(inputName) == null) {
        throw new Hdf5Exception(
            "Error: there is an electrical input with name: "
                + inputName
                + " specified in "
                + "that file, but no such electrical input exists in the project. Add one to allow import of this file");
      }
      // Get the attributes of the Input and compare them with the attributes within the project
      // Test to find out what type of input this is

    } else if (g.getName().startsWith("IClamp") && inInputs) {
      String inputName = g.getParent().getName().substring(6);
      // Get the input sites from the table

      String cellGroup =
          Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.INPUT_TARGET_POPULATION_ATTR);
      if (cellGroup == null) {
        cellGroup =
            Hdf5Utils.getFirstStringValAttr(
                attrs, NetworkMLConstants.INPUT_TARGET_CELLGROUP_OLD_ATTR); // check old name
      }

      float readDelay =
          (float)
              UnitConverter.getTime(
                  Float.parseFloat(
                      Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.INPUT_DELAY_ATTR)),
                  inputUnitSystem,
                  UnitConverter.NEUROCONSTRUCT_UNITS);
      float readDuration =
          (float)
              UnitConverter.getTime(
                  Float.parseFloat(
                      Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.INPUT_DUR_ATTR)),
                  inputUnitSystem,
                  UnitConverter.NEUROCONSTRUCT_UNITS);
      float readAmp =
          (float)
              UnitConverter.getCurrent(
                  Float.parseFloat(
                      Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.INPUT_AMP_ATTR)),
                  inputUnitSystem,
                  UnitConverter.NEUROCONSTRUCT_UNITS);

      StimulationSettings nextStim = project.elecInputInfo.getStim(inputName);
      ElectricalInput myElectricalInput = nextStim.getElectricalInput();
      IClamp ic = (IClamp) myElectricalInput;

      logger.logComment("Found an IClamp Input");

      float currDelay = -1, currDur = -1, currAmp = -1;

      /*
      try
      {
          ic.getDelay().reset();
          currDelay = ic.getDelay().getNumber();
          ic.getDuration().reset();
          currDur = ic.getDuration().getNumber();
          ic.getAmplitude().reset();
          currAmp = ic.getAmplitude().getNumber();
      }
      catch (Exception ex)
      {
          logger.logError("Legacy error getting iclamp params!!");
      }*/

      currDelay = ic.getDel().getNominalNumber();
      currDur = ic.getDur().getNominalNumber();
      currAmp = ic.getAmp().getNominalNumber();

      if ((!project.elecInputInfo.getStim(inputName).getCellGroup().equals(cellGroup))
          || (readDelay != currDelay)
          || (readDuration != currDur)
          || (readAmp != currAmp)) {
        throw new Hdf5Exception(
            "Error: the input properties of the file do not match those in the project for input "
                + inputName
                + ""
                + "\nreadDelay: "
                + readDelay
                + ", currDelay: "
                + currDelay
                + "\nreadDuration: "
                + readDuration
                + ", currDur: "
                + currDur
                + "\nreadAmp: "
                + readAmp
                + ", currAmp: "
                + currAmp
                + ", str: "
                + Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.INPUT_AMP_ATTR));
      }
      currentInput = inputName;
    } else if (g.getName().startsWith("RandomSpikeTrain") && inInputs) {
      String inputName = g.getParent().getName().substring(6);
      // Get the input sites from the table
      String cellGroup =
          Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.INPUT_TARGET_POPULATION_ATTR);
      if (cellGroup == null) {
        cellGroup =
            Hdf5Utils.getFirstStringValAttr(
                attrs, NetworkMLConstants.INPUT_TARGET_CELLGROUP_OLD_ATTR); // check old name
      }

      float frequency =
          (float)
              UnitConverter.getRate(
                  Float.parseFloat(
                      Hdf5Utils.getFirstStringValAttr(
                          attrs, NetworkMLConstants.RND_STIM_FREQ_ATTR)),
                  inputUnitSystem,
                  UnitConverter.NEUROCONSTRUCT_UNITS);
      String mechanism =
          Hdf5Utils.getFirstStringValAttr(attrs, NetworkMLConstants.RND_STIM_MECH_ATTR);

      StimulationSettings nextStim = project.elecInputInfo.getStim(inputName);
      ElectricalInput myElectricalInput = nextStim.getElectricalInput();
      RandomSpikeTrain rs = (RandomSpikeTrain) myElectricalInput;

      logger.logComment("Found an Random Spike Train Input");

      if ((!project.elecInputInfo.getStim(inputName).getCellGroup().equals(cellGroup))
          || frequency != rs.getRate().getFixedNum()
          || !rs.getSynapseType().equals(mechanism)) {
        throw new Hdf5Exception(
            "Error: the input properties of the file do not match those in the project for input "
                + inputName);
      }
      currentInput = inputName;
    }
  }
  private void createIndex(
      File indexFile, List<Group> groups, ArrayList<String> filenames, Formatter f)
      throws IOException {
    Grib2Record first = null; // take global metadata from here

    if (indexFile.exists()) indexFile.delete(); // replace it
    f.format(" createIndex for %s%n", indexFile.getPath());

    RandomAccessFile raf = new RandomAccessFile(indexFile.getPath(), "rw");
    raf.order(RandomAccessFile.BIG_ENDIAN);
    try {
      //// header message
      raf.write(MAGIC_START.getBytes("UTF-8"));
      raf.writeInt(version);
      long lenPos = raf.getFilePointer();
      raf.writeLong(0); // save space to write the length of the record section
      long countBytes = 0;
      int countRecords = 0;
      for (Group g : groups) {
        g.fileSet = new HashSet<Integer>();
        for (Grib2Rectilyser.VariableBag vb : g.rect.getGribvars()) {
          if (first == null) first = vb.first;
          GribCollectionProto.VariableRecords vr = writeRecordsProto(vb, g.fileSet);
          byte[] b = vr.toByteArray();
          vb.pos = raf.getFilePointer();
          vb.length = b.length;
          raf.write(b);
          countBytes += b.length;
          countRecords += vb.recordMap.length;
        }
      }
      long bytesPerRecord = countBytes / ((countRecords == 0) ? 1 : countRecords);
      f.format(
          "  write RecordMaps: bytes = %d record = %d bytesPerRecord=%d%n",
          countBytes, countRecords, bytesPerRecord);

      if (first == null) {
        logger.error("GribCollection {}: has no files\n{}", gc.getName(), f.toString());
        throw new IllegalArgumentException("GribCollection " + gc.getName() + " has no files");
      }

      long pos = raf.getFilePointer();
      raf.seek(lenPos);
      raf.writeLong(countBytes);
      raf.seek(pos); // back to the output.

      GribCollectionProto.GribCollectionIndex.Builder indexBuilder =
          GribCollectionProto.GribCollectionIndex.newBuilder();
      indexBuilder.setName(gc.getName());

      for (String fn : filenames) indexBuilder.addFiles(fn);

      for (Group g : groups) indexBuilder.addGroups(writeGroupProto(g));

      /* int count = 0;
      for (DatasetCollectionManager dcm : collections) {
        indexBuilder.addParams(makeParamProto(new Parameter("spec" + count, dcm.())));
        count++;
      } */

      // what about just storing first ??
      Grib2SectionIdentification ids = first.getId();
      indexBuilder.setCenter(ids.getCenter_id());
      indexBuilder.setSubcenter(ids.getSubcenter_id());
      indexBuilder.setMaster(ids.getMaster_table_version());
      indexBuilder.setLocal(ids.getLocal_table_version());

      Grib2Pds pds = first.getPDS();
      indexBuilder.setGenProcessType(pds.getGenProcessType());
      indexBuilder.setGenProcessId(pds.getGenProcessId());
      indexBuilder.setBackProcessId(pds.getBackProcessId());

      GribCollectionProto.GribCollectionIndex index = indexBuilder.build();
      byte[] b = index.toByteArray();
      NcStream.writeVInt(raf, b.length); // message size
      raf.write(b); // message  - all in one gulp
      f.format("  write GribCollectionIndex= %d bytes%n", b.length);

    } finally {
      f.format("  file size =  %d bytes%n", raf.length());
      raf.close();
    }
  }
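createIndex writes a small header (the MAGIC_START bytes, an int version, and a placeholder long that is later overwritten with the byte length of the record section) before the per-variable records and the final protobuf message. A minimal sketch of reading that header back with plain java.io, assuming only what the writer above shows; the real GRIB code uses its own RandomAccessFile wrapper, and MAGIC_START's value is not given here, so it is passed in as a known constant:

  import java.io.DataInputStream;
  import java.io.FileInputStream;
  import java.io.IOException;

  // Hypothetical sketch: read back the header fields written at the top of createIndex().
  // DataInputStream is big-endian, matching RandomAccessFile.BIG_ENDIAN above.
  static void readIndexHeader(String indexPath, String magicStart) throws IOException {
    try (DataInputStream in = new DataInputStream(new FileInputStream(indexPath))) {
      byte[] magic = new byte[magicStart.getBytes("UTF-8").length];
      in.readFully(magic); // MAGIC_START bytes
      int version = in.readInt(); // index format version
      long recordSectionLength = in.readLong(); // patched in after the records are written
      System.out.printf(
          "magic=%s version=%d recordBytes=%d%n",
          new String(magic, "UTF-8"), version, recordSectionLength);
    }
  }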