Code example #1
  public void parseGroup(Group g) throws Hdf5Exception, EndOfSequenceException {
    startGroup(g);

    java.util.List members = g.getMemberList();

    // NOTE: the member list is traversed twice so that subgroups are handled before datasets.
    // This is mainly because synapse_props groups need to be parsed before the dataset of
    // connections.

    for (int j = 0; j < members.size(); j++) {
      HObject obj = (HObject) members.get(j);

      if (obj instanceof Group) {
        Group subGroup = (Group) obj;

        logger.logComment("---------    Found a sub group: " + subGroup.getName());

        parseGroup(subGroup);
      }
    }

    for (int j = 0; j < members.size(); j++) {
      HObject obj = (HObject) members.get(j);

      if (obj instanceof Dataset) {
        Dataset ds = (Dataset) obj;

        logger.logComment("Found a dataset: " + ds.getName());

        dataSet(ds);
      }
    }

    endGroup(g);
  }
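
A minimal sketch of how the recursive walk above might be started from the top of a file. It assumes the legacy ncsa.hdf.object API that this example appears to use (getRootNode() plus getUserObject(); newer hdf.object releases expose getRootObject() instead), and the helper name parseFile is ours, not taken from the original source.

  public void parseFile(String h5FileName) throws Exception {
    // Hypothetical companion helper: open the file, find its root Group, hand it to parseGroup().
    H5File file = new H5File(h5FileName, FileFormat.READ);
    try {
      file.open();

      // In the older object API the root tree node wraps the root Group of the HDF5 file.
      javax.swing.tree.DefaultMutableTreeNode rootNode =
          (javax.swing.tree.DefaultMutableTreeNode) file.getRootNode();
      Group rootGroup = (Group) rootNode.getUserObject();

      // Walk the whole hierarchy; subgroups are visited before datasets, as in parseGroup() above.
      parseGroup(rootGroup);
    } finally {
      file.close();
    }
  }
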
 /*
  * (non-Javadoc)
  *
  * @see org.geppetto.core.model.state.visitors.DefaultStateVisitor#visitVariableNode
  */
 @Override
 public boolean visitVariableNode(VariableNode node) {
   String variable = node.getInstancePath();
   H5File file = _recording.getHDF5();
   String variablePath = "/" + variable.replace(".", "/");
   Dataset v = (Dataset) FileFormat.findObject(file, variablePath);
   if (v != null) {
     Object dataRead;
     try {
       dataRead = v.read();
       Quantity quantity = new Quantity();
       AValue readValue = null;
       if (dataRead instanceof double[]) {
         double[] dr = (double[]) dataRead;
         readValue = ValuesFactory.getDoubleValue(dr[_currentIndex]);
       } else if (dataRead instanceof float[]) {
         float[] fr = (float[]) dataRead;
         readValue = ValuesFactory.getFloatValue(fr[_currentIndex]);
       } else if (dataRead instanceof int[]) {
         int[] ir = (int[]) dataRead;
         readValue = ValuesFactory.getIntValue(ir[_currentIndex]);
       }
       quantity.setValue(readValue);
       node.addQuantity(quantity);
     } catch (ArrayIndexOutOfBoundsException e) {
       _endOfSteps = e.getMessage();
     } catch (Exception | OutOfMemoryError e) {
       _errorMessage = e.getMessage();
     }
   }
   return super.visitVariableNode(node);
 }
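
visitVariableNode above reads the whole recorded array into memory and then indexes it with _currentIndex. For long recordings, the same Dataset API also allows selecting a single element before reading, so only one value per step is transferred. A sketch under that assumption, for a 1-D dataset; the helper name readSingleValue is ours.

  private static Object readSingleValue(Dataset dataset, int index) throws Exception {
    dataset.init(); // populates rank, dims and the default (full) selection

    long[] start = dataset.getStartDims();       // selection offset, one entry per dimension
    long[] selected = dataset.getSelectedDims(); // selection size, one entry per dimension
    start[0] = index;                            // begin at the requested step...
    selected[0] = 1;                             // ...and take a single element

    // read() honours the selection, so this returns a length-1 primitive array
    // (double[1], float[1] or int[1], depending on the stored type).
    return dataset.read();
  }
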
  /*
   * (non-Javadoc)
   *
   * @see org.geppetto.core.model.state.visitors.DefaultStateVisitor#visitSkeletonAnimationNode
   */
  @Override
  public boolean visitSkeletonAnimationNode(SkeletonAnimationNode node) {
    String variable = node.getInstancePath();
    H5File file = _recording.getHDF5();
    String variablePath = "/" + variable.replace(".", "/");
    Dataset v = (Dataset) FileFormat.findObject(file, variablePath);
    if (v != null) {
      Object dataRead;
      try {
        dataRead = v.read();

        // get metadata from recording node
        List<Attribute> attributes = v.getMetadata();
        String meta = "";

        for (Attribute a : attributes) {
          if (a.getName().equals("custom_metadata")) meta = ((String[]) a.getValue())[0];
        }

        // split into key value pair
        Map<String, String> metaMap = StringSplitter.keyValueSplit(meta, ";");

        double[] flatMatrices = null;
        if (dataRead instanceof double[]) {
          double[] dr = (double[]) dataRead;

          // get items of interest based on matrix dimension and items per step
          int itemsPerStep = Integer.parseInt(metaMap.get("items_per_step"));
          int startIndex = _currentIndex * itemsPerStep;
          int endIndex = startIndex + (itemsPerStep);

          if (endIndex <= dr.length) {
            flatMatrices = Arrays.copyOfRange(dr, startIndex, endIndex);
          } else {
            throw new ArrayIndexOutOfBoundsException("ArrayIndexOutOfBounds");
          }
        }

        // set matrices on skeleton animation node (only when a double[] payload was read)
        if (flatMatrices != null) {
          node.addSkeletonTransformation(Arrays.asList(ArrayUtils.toObject(flatMatrices)));
        }
      } catch (ArrayIndexOutOfBoundsException e) {
        _endOfSteps = e.getMessage();
      } catch (Exception | OutOfMemoryError e) {
        _errorMessage = e.getMessage();
      }
    }

    return super.visitSkeletonAnimationNode(node);
  }
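
The custom_metadata attribute above is assumed to hold ';'-separated key=value pairs (for example items_per_step=16;...), which StringSplitter.keyValueSplit turns into a map. The geppetto utility itself is not shown in this listing; the sketch below is a plain-Java stand-in for what the code relies on, and the '=' key/value separator is an assumption.

  private static Map<String, String> keyValueSplit(String meta, String pairSeparator) {
    // Hedged stand-in for StringSplitter.keyValueSplit(meta, ";") as used above.
    Map<String, String> result = new java.util.HashMap<>();
    if (meta == null || meta.isEmpty()) return result;

    for (String pair : meta.split(pairSeparator)) {
      int eq = pair.indexOf('='); // assumed key/value separator
      if (eq > 0) {
        result.put(pair.substring(0, eq).trim(), pair.substring(eq + 1).trim());
      }
    }
    return result;
  }
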
Code example #4
  List<GeoPointCarbon> occurrencesToList(Document document, Region region) {

    String DATASETNAME_LAT = "RetrievalGeometry/retrieval_latitude";
    String DATASETNAME_LONG = "RetrievalGeometry/retrieval_longitude";
    String DATASETNAME_XCO2 = "RetrievalResults/xco2";
    String DATASETNAME_XCO2_INTERF = "RetrievalResults/xco2_uncert_interf";
    String DATASETNAME_XCO2_NOISE = "RetrievalResults/xco2_uncert_noise";
    String DATASETNAME_DATE = "Metadata/OrbitStartDate";

    H5File file = null;
    Dataset latitude = null;
    Dataset longitude = null;
    Dataset xco2 = null;
    Dataset xco2_interf = null;
    Dataset xco2_noise = null;
    H5ScalarDS orbitStartDate = null;
    int latitude_dataspace_id = -1;
    int longitude_dataspace_id = -1;
    int xco2_dataspace_id = -1;
    int xco2_interf_dataspace_id = -1;
    int xco2_noise_dataspace_id = -1;
    int latitude_dataset_id = -1;
    int longitude_dataset_id = -1;
    int xco2_dataset_id = -1;
    int xco2_interf_dataset_id = -1;
    int xco2_noise_dataset_id = -1;
    long[] latitude_dims = {1};
    long[] longitude_dims = {1};
    long[] xco2_dims = {1};
    long[] xco2_interf_dims = {1};
    long[] xco2_noise_dims = {1};
    float[] latitude_data;
    float[] longitude_data;
    float[] xco2_data;
    float[] xco2_interf_data;
    float[] xco2_noise_data;
    String[] date_data = {""};
    List<GeoPointCarbon> listOfPoints = new ArrayList<>();

    try {

      // Open the existing file in read-only mode.
      file = new H5File(document.getFileName(), FileFormat.READ);
      file.open();

      // Open latitude dataset.
      latitude = (Dataset) file.get(DATASETNAME_LAT);
      latitude_dataset_id = latitude.open();

      // Open longitude dataset.
      longitude = (Dataset) file.get(DATASETNAME_LONG);
      longitude_dataset_id = longitude.open();

      // Open xco2 dataset.
      xco2 = (Dataset) file.get(DATASETNAME_XCO2);
      xco2_dataset_id = xco2.open();

      // Get orbitStartDate dataset.
      orbitStartDate = (H5ScalarDS) file.get(DATASETNAME_DATE);

      // Get latitude dataspace and allocate memory for the read buffer.
      if (latitude_dataset_id >= 0) latitude_dataspace_id = H5.H5Dget_space(latitude_dataset_id);
      if (latitude_dataspace_id >= 0)
        H5.H5Sget_simple_extent_dims(latitude_dataspace_id, latitude_dims, null);

      // Get longitude dataspace and allocate memory for the read buffer.
      if (longitude_dataset_id >= 0) longitude_dataspace_id = H5.H5Dget_space(longitude_dataset_id);
      if (longitude_dataspace_id >= 0)
        H5.H5Sget_simple_extent_dims(longitude_dataspace_id, longitude_dims, null);

      // Get xco2 dataspace and allocate memory for the read buffer.
      if (xco2_dataset_id >= 0) xco2_dataspace_id = H5.H5Dget_space(xco2_dataset_id);
      if (xco2_dataspace_id >= 0) H5.H5Sget_simple_extent_dims(xco2_dataspace_id, xco2_dims, null);

      // Allocate read buffers sized from the dataspace dimensions.
      latitude_data = new float[(int) latitude_dims[0]];
      longitude_data = new float[(int) longitude_dims[0]];
      xco2_data = new float[(int) xco2_dims[0]];

      // Read the data using the default properties.
      latitude.init();
      latitude_data = (float[]) latitude.getData();

      longitude.init();
      longitude_data = (float[]) longitude.getData();

      xco2.init();
      xco2_data = (float[]) xco2.getData();

      date_data = (String[]) orbitStartDate.read();

      // Process data
      for (int i = 0; i < latitude_data.length; i++) {

        if (region.inRegion(latitude_data[i], longitude_data[i])) {
          listOfPoints.add(
              new GeoPointCarbon(
                  latitude_data[i],
                  longitude_data[i],
                  date_data[0],
                  (float) (xco2_data[i] * Math.pow(10, 6))));
        }
      }

      // End access to the latitude dataset and release resources used by it.
      if (latitude_dataset_id >= 0) latitude.close(latitude_dataset_id);
      if (latitude_dataspace_id >= 0) H5.H5Sclose(latitude_dataspace_id);

      // End access to the longitude dataset and release resources used by it.
      if (longitude_dataset_id >= 0) longitude.close(longitude_dataset_id);
      if (longitude_dataspace_id >= 0) H5.H5Sclose(longitude_dataspace_id);

      // End access to the xco2 dataset and release resources used by it.
      if (xco2_dataset_id >= 0) xco2.close(xco2_dataset_id);
      if (xco2_dataspace_id >= 0) H5.H5Sclose(xco2_dataspace_id);

      // Close the file.
      file.close();

    } catch (Exception e) {
      e.printStackTrace();
    }

    return listOfPoints;
  }
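
Note that the low-level H5.H5Dget_space / H5Sget_simple_extent_dims calls above are only used to pre-size buffers that getData() immediately replaces, so a read that stays entirely in the object layer is enough. A simplified sketch; the helper name readFloatDataset is ours, and the paths would be the DATASETNAME_* constants above.

  private static float[] readFloatDataset(H5File file, String path) throws Exception {
    Dataset ds = (Dataset) file.get(path);
    ds.init();                     // selects the full extent by default
    return (float[]) ds.getData(); // reads (or returns cached) data as a flat float[]
  }

Usage, following the pattern above: float[] latitude_data = readFloatDataset(file, DATASETNAME_LAT); and likewise for longitude and xco2.
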
Code example #5
File: Dataset.java  Project: karoscha/jhdf5
 /**
  * @deprecated Not for public use in the future. <br>
  *     Use {@link #convertToUnsignedC(Object, Object)} instead.
  */
 @Deprecated
 public static Object convertToUnsignedC(Object data_in) {
   return Dataset.convertToUnsignedC(data_in, null);
 }
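
A short sketch of the recommended two-argument form referenced in the deprecation note. HDF5 unsigned 8-bit data arrives in Java as signed byte[]; convertToUnsignedC widens it to the next larger type (byte[] to short[], short[] to int[], int[] to long[]) so the values keep their unsigned meaning. The helper name readUnsignedByteDataset is ours.

  static short[] readUnsignedByteDataset(Dataset dataset) throws Exception {
    dataset.init();
    byte[] raw = (byte[]) dataset.read(); // unsigned 8-bit values arrive as signed bytes
    // Passing null as the second argument lets convertToUnsignedC allocate the output array.
    return (short[]) Dataset.convertToUnsignedC(raw, null);
  }
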
Code example #6
  public void dataSet(Dataset d) throws Hdf5Exception {
    logger.logComment("-----   Looking through dataset: " + d);

    ArrayList<Attribute> attrs = Hdf5Utils.parseDatasetForAttributes(d);

    for (Attribute attribute : attrs) {
      logger.logComment(
          "Dataset: "
              + d.getName()
              + " has attribute: "
              + attribute.getName()
              + " = "
              + Hdf5Utils.getFirstStringValAttr(attrs, attribute.getName()));
    }

    float[][] data = Hdf5Utils.parse2Ddataset(d);

    logger.logComment("Data has size: (" + data.length + ", " + data[0].length + ")");

    if (inPopulations && currentCellGroup != null) {
      for (int i = 0; i < data.length; i++) {
        int id = (int) data[i][0];
        float x = data[i][1];
        float y = data[i][2];
        float z = data[i][3];

        PositionRecord posRec = new PositionRecord(id, x, y, z);

        if (data[0].length == 5) {
          posRec.setNodeId((int) data[i][4]);
        }

        this.project.generatedCellPositions.addPosition(currentCellGroup, posRec);
      }
    }
    if (inProjections && currentNetConn != null) {
      logger.logComment("Adding info for NetConn: " + currentNetConn);

      int id_col = -1;

      int pre_cell_id_col = -1;
      int pre_segment_id_col = -1;
      int pre_fraction_along_col = -1;

      int post_cell_id_col = -1;
      int post_segment_id_col = -1;
      int post_fraction_along_col = -1;

      int prop_delay_col = -1;

      for (Attribute attribute : attrs) {
        String storedInColumn = Hdf5Utils.getFirstStringValAttr(attrs, attribute.getName());

        if (storedInColumn.equals(NetworkMLConstants.CONNECTION_ID_ATTR)) {
          id_col = Integer.parseInt(attribute.getName().substring("column_".length()));
          logger.logComment("id col: " + id_col);
        } else if (storedInColumn.equals(NetworkMLConstants.PRE_CELL_ID_ATTR)) {
          pre_cell_id_col = Integer.parseInt(attribute.getName().substring("column_".length()));
        } else if (storedInColumn.equals(NetworkMLConstants.PRE_SEGMENT_ID_ATTR)) {
          pre_segment_id_col = Integer.parseInt(attribute.getName().substring("column_".length()));
          logger.logComment("pre_segment_id_col: " + pre_segment_id_col);
        } else if (storedInColumn.equals(NetworkMLConstants.PRE_FRACT_ALONG_ATTR)) {
          pre_fraction_along_col =
              Integer.parseInt(attribute.getName().substring("column_".length()));
          logger.logComment("pre_fraction_along_col: " + pre_fraction_along_col);
        } else if (storedInColumn.equals(NetworkMLConstants.POST_CELL_ID_ATTR)) {
          post_cell_id_col = Integer.parseInt(attribute.getName().substring("column_".length()));
        } else if (storedInColumn.equals(NetworkMLConstants.POST_SEGMENT_ID_ATTR)) {
          post_segment_id_col = Integer.parseInt(attribute.getName().substring("column_".length()));
        } else if (storedInColumn.equals(NetworkMLConstants.POST_FRACT_ALONG_ATTR)) {
          post_fraction_along_col =
              Integer.parseInt(attribute.getName().substring("column_".length()));
        } else if (storedInColumn.startsWith(NetworkMLConstants.PROP_DELAY_ATTR)) {
          prop_delay_col = Integer.parseInt(attribute.getName().substring("column_".length()));
        }

        for (String synType : getConnectionSynTypes()) {
          if (storedInColumn.endsWith(synType)) {
            ConnSpecificProps cp = null;

            for (ConnSpecificProps currCp : localConnProps) {
              if (currCp.synapseType.equals(synType)) cp = currCp;
            }
            if (cp == null) {
              cp = new ConnSpecificProps(synType);
              cp.internalDelay = -1;
              cp.weight = -1;
              localConnProps.add(cp);
            }

            if (storedInColumn.startsWith(NetworkMLConstants.INTERNAL_DELAY_ATTR)) {
              cp.internalDelay =
                  Integer.parseInt(
                      attribute
                          .getName()
                          .substring("column_".length())); // store the col num temporarily..
            }
            if (storedInColumn.startsWith(NetworkMLConstants.WEIGHT_ATTR)) {
              cp.weight =
                  Integer.parseInt(
                      attribute
                          .getName()
                          .substring("column_".length())); // store the col num temporarily..
            }
          }
        }
      }

      for (int i = 0; i < data.length; i++) {
        int pre_seg_id = 0;
        float pre_fract_along = 0.5f;
        int post_seg_id = 0;
        float post_fract_along = 0.5f;

        int id = (int) data[i][id_col];
        int pre_cell_id = (int) data[i][pre_cell_id_col];
        int post_cell_id = (int) data[i][post_cell_id_col];

        float prop_delay = 0;

        if (pre_segment_id_col >= 0) pre_seg_id = (int) data[i][pre_segment_id_col];
        if (pre_fraction_along_col >= 0) pre_fract_along = data[i][pre_fraction_along_col];
        if (post_segment_id_col >= 0) post_seg_id = (int) data[i][post_segment_id_col];
        if (post_fraction_along_col >= 0) post_fract_along = data[i][post_fraction_along_col];

        if (prop_delay_col >= 0)
          prop_delay =
              (float)
                  UnitConverter.getTime(
                      data[i][prop_delay_col], projUnitSystem, UnitConverter.NEUROCONSTRUCT_UNITS);

        ArrayList<ConnSpecificProps> props = new ArrayList<ConnSpecificProps>();

        if (localConnProps.size() > 0) {
          for (ConnSpecificProps currCp : localConnProps) {
            logger.logComment("Pre cp: " + currCp);
            ConnSpecificProps cp2 = new ConnSpecificProps(currCp.synapseType);

            if (currCp.internalDelay > 0) { // index was stored in this val...
              cp2.internalDelay =
                  (float)
                      UnitConverter.getTime(
                          data[i][(int) currCp.internalDelay],
                          projUnitSystem,
                          UnitConverter.NEUROCONSTRUCT_UNITS);
            }
            if (currCp.weight > 0) { // index was stored in this val...
              cp2.weight = data[i][(int) currCp.weight];
            }

            logger.logComment("Filled cp: " + cp2);

            props.add(cp2);
          }
        }

        this.project.generatedNetworkConnections.addSynapticConnection(
            currentNetConn,
            GeneratedNetworkConnections.MORPH_NETWORK_CONNECTION,
            pre_cell_id,
            pre_seg_id,
            pre_fract_along,
            post_cell_id,
            post_seg_id,
            post_fract_along,
            prop_delay,
            props);
      }
    }
    if (inInputs && currentInput != null) {
      logger.logComment("Adding info for: " + currentInput);
      StimulationSettings nextStim = project.elecInputInfo.getStim(currentInput);
      ElectricalInput myElectricalInput = nextStim.getElectricalInput();
      String electricalInputType = myElectricalInput.getType();
      String cellGroup = nextStim.getCellGroup();

      for (int i = 0; i < data.length; i++) {
        Float fileCellId = data[i][0];
        Float fileSegmentId = data[i][1];
        Float fractionAlong = data[i][2];
        int cellId = fileCellId.intValue();
        int segmentId = fileSegmentId.intValue();

        SingleElectricalInput singleElectricalInputFromFile =
            new SingleElectricalInput(
                electricalInputType, cellGroup, cellId, segmentId, fractionAlong, null);

        this.project.generatedElecInputs.addSingleInput(
            currentInput, singleElectricalInputFromFile);
      }
    }
  }
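
Each attribute on the connection dataset above is assumed to be named column_<n>, with the attribute's string value naming the NetworkML field stored in column <n>; the index extraction repeated throughout the loop can be factored into a small helper (a sketch, the helper name columnIndex is ours).

  private static int columnIndex(Attribute attribute) {
    // "column_3" -> 3; relies on the column_<n> naming convention used above.
    return Integer.parseInt(attribute.getName().substring("column_".length()));
  }
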