Example #1
  // read all records in all files,
  // divide into groups based on GDS hash
  // each group has an arraylist of all records that belong to it.
  // for each group, run the rectilyser to derive the coordinates and variables
  public List<Group> makeAggregatedGroups(
      ArrayList<String> filenames, CollectionManager.Force force, Formatter f) throws IOException {
    Map<Integer, Group> gdsMap = new HashMap<Integer, Group>();

    f.format("GribCollection %s: makeAggregatedGroups%n", gc.getName());
    int total = 0;
    int fileno = 0;
    for (CollectionManager dcm : collections) {
      // dcm.scanIfNeeded(); // LOOK ??
      f.format(" dcm= %s%n", dcm);
      Map<Integer, Integer> gdsConvert = (Map<Integer, Integer>) dcm.getAuxInfo("gdsHash");

      for (MFile mfile : dcm.getFiles()) {
        // f.format("%3d: %s%n", fileno, mfile.getPath());
        filenames.add(mfile.getPath());

        Grib2Index index = new Grib2Index();
        try {
          if (!index.readIndex(
              mfile.getPath(),
              mfile.getLastModified(),
              force)) { // here's where the index date is checked against the data file
            index.makeIndex(mfile.getPath(), f);
            f.format(
                "  Index written: %s == %d records %n",
                mfile.getName() + Grib2Index.IDX_EXT, index.getRecords().size());
          } else if (debug) {
            f.format(
                "  Index read: %s == %d records %n",
                mfile.getName() + Grib2Index.IDX_EXT, index.getRecords().size());
          }
        } catch (IOException ioe) {
          f.format(
              "GribCollectionBuilder: reading/creating gbx9 index failed err=%s%n  skipping %s%n",
              ioe.getMessage(), mfile.getPath() + Grib2Index.IDX_EXT);
          continue;
        }

        for (Grib2Record gr : index.getRecords()) {
          gr.setFile(fileno); // each record tracks which file it belongs to
          int gdsHash = gr.getGDSsection().getGDS().hashCode(); // use GDS hash code to group records
          // allow external config to override gdsHash, to work around errors in GDS encoding,
          // since grouping needs exact hash matching
          if (gdsConvert != null && gdsConvert.get(gdsHash) != null) {
            gdsHash = gdsConvert.get(gdsHash);
          }

          Group g = gdsMap.get(gdsHash);
          if (g == null) {
            g = new Group(gr.getGDSsection(), gdsHash);
            gdsMap.put(gdsHash, g);
          }
          g.records.add(gr);
          total++;
        }
        fileno++;
      }
    }
    f.format(" total grib records= %d%n", total);

    Grib2Rectilyser.Counter c = new Grib2Rectilyser.Counter();
    List<Group> result = new ArrayList<Group>(gdsMap.values());
    for (Group g : result) {
      g.rect = new Grib2Rectilyser(g.records, g.gdsHash);
      f.format(" GDS hash %d == ", g.gdsHash);
      g.rect.make(f, c);
    }
    f.format(
        " Rectilyser: nvars=%d records unique=%d total=%d dups=%d (%f) %n",
        c.vars, c.recordsUnique, c.records, c.dups, ((float) c.dups) / c.records);

    return result;
  }
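
The comment block above describes the core pattern: read every record, then bucket the records into groups keyed by the GDS hash. The following is a minimal, self-contained sketch of just that grouping step, assuming hypothetical Record and Group stand-ins rather than the actual GRIB classes used in the method:

// A minimal sketch of the group-by-GDS-hash step, with hypothetical Record/Group stand-ins.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GroupByGdsHashSketch {

  static class Record {
    final int gdsHash; // stands in for gr.getGDSsection().getGDS().hashCode()
    Record(int gdsHash) {
      this.gdsHash = gdsHash;
    }
  }

  static class Group {
    final int gdsHash;
    final List<Record> records = new ArrayList<>();
    Group(int gdsHash) {
      this.gdsHash = gdsHash;
    }
  }

  // Bucket all records into groups keyed by their GDS hash, creating a group on first use.
  static List<Group> makeGroups(List<Record> allRecords) {
    Map<Integer, Group> gdsMap = new HashMap<>();
    for (Record r : allRecords) {
      Group g = gdsMap.computeIfAbsent(r.gdsHash, Group::new);
      g.records.add(r);
    }
    return new ArrayList<>(gdsMap.values());
  }

  public static void main(String[] args) {
    List<Record> records = List.of(new Record(42), new Record(42), new Record(7));
    for (Group g : makeGroups(records)) {
      System.out.printf("GDS hash %d -> %d records%n", g.gdsHash, g.records.size());
    }
  }
}

A plain HashMap keyed by the hash is enough here because grouping relies on exact hash equality, which is also why the config-driven hash override in the method must be applied before the map lookup.
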
  // read all records in all files,
  // divide into groups based on GDS hash
  // each group has an arraylist of all records that belong to it.
  // for each group, run the rectilyser to derive the coordinates and variables
  public List<Group> makeAggregatedGroups(
      List<String> filenames, CollectionManager.Force force, Formatter f) throws IOException {
    Map<Integer, Group> gdsMap = new HashMap<Integer, Group>();
    boolean intvMerge = mergeIntvDefault;

    f.format("GribCollection %s: makeAggregatedGroups%n", gc.getName());
    int total = 0;
    int fileno = 0;

    for (CollectionManager dcm : collections) {
      f.format(" dcm= %s%n", dcm);
      FeatureCollectionConfig.GribConfig config =
          (FeatureCollectionConfig.GribConfig)
              dcm.getAuxInfo(FeatureCollectionConfig.AUX_GRIB_CONFIG);
      Map<Integer, Integer> gdsConvert = (config != null) ? config.gdsHash : null;
      FeatureCollectionConfig.GribIntvFilter intvMap = (config != null) ? config.intvFilter : null;
      intvMerge =
          (config == null) || (config.intvMerge == null) ? mergeIntvDefault : config.intvMerge;

      for (MFile mfile : dcm.getFiles()) {
        // f.format("%3d: %s%n", fileno, mfile.getPath());
        filenames.add(mfile.getPath());

        Grib2Index index = null;
        try {
          index =
              (Grib2Index)
                  GribIndex.readOrCreateIndexFromSingleFile(
                      false, !isSingleFile, mfile, config, force, f);

        } catch (IOException ioe) {
          logger.warn(
              "GribCollectionBuilder {}: reading/creating gbx9 index failed err={}",
              gc.getName(),
              ioe.getMessage());
          f.format(
              "GribCollectionBuilder: reading/creating gbx9 index failed err=%s%n  skipping %s%n",
              ioe.getMessage(), mfile.getPath() + GribIndex.IDX_EXT);
          continue;
        }

        for (Grib2Record gr : index.getRecords()) {
          if (this.tables == null) {
            Grib2SectionIdentification ids = gr.getId(); // so all records must use the same table (!)
            this.tables =
                Grib2Customizer.factory(
                    ids.getCenter_id(),
                    ids.getSubcenter_id(),
                    ids.getMaster_table_version(),
                    ids.getLocal_table_version());
            if (config != null)
              tables.setTimeUnitConverter(
                  config.getTimeUnitConverter()); // LOOK doesn't really work with multiple collections
          }
          if (intvMap != null && filterTinv(gr, intvMap, f)) continue; // skip

          gr.setFile(fileno); // each record tracks which file it belongs to
          int gdsHash = gr.getGDSsection().getGDS().hashCode(); // use GDS hash code to group records
          // allow external config to override gdsHash, to work around errors in GDS encoding,
          // since grouping needs exact hash matching
          if (gdsConvert != null && gdsConvert.get(gdsHash) != null) {
            gdsHash = gdsConvert.get(gdsHash);
          }

          Group g = gdsMap.get(gdsHash);
          if (g == null) {
            g = new Group(gr.getGDSsection(), gdsHash);
            gdsMap.put(gdsHash, g);
          }
          g.records.add(gr);
          total++;
        }
        fileno++;
      }
    }
    f.format(" total grib records= %d%n", total);

    Grib2Rectilyser.Counter c = new Grib2Rectilyser.Counter(); // debugging
    List<Group> result = new ArrayList<Group>(gdsMap.values());
    for (Group g : result) {
      g.rect = new Grib2Rectilyser(tables, g.records, g.gdsHash, intvMerge);
      f.format(" GDS hash %d == ", g.gdsHash);
      g.rect.make(f, c, filenames);
    }
    f.format(
        " Rectilyser: nvars=%d records unique=%d total=%d dups=%d (%f) %n",
        c.vars, c.recordsUnique, c.records, c.dups, ((float) c.dups) / c.records);

    return result;
  }
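
The second variant pulls the gdsHash override map from FeatureCollectionConfig rather than from a raw aux-info lookup, but the remapping itself works the same way: a record whose GDS was encoded incorrectly gets its hash rewritten before the group lookup, so it lands in the intended group. A small sketch, with made-up hash values standing in for real GDS hash codes:

// A minimal sketch of the config-driven gdsHash override; hash values are made up.
import java.util.HashMap;
import java.util.Map;

public class GdsHashOverrideSketch {
  public static void main(String[] args) {
    // plays the role of config.gdsHash (gdsConvert): remap a mis-encoded hash to the intended one
    Map<Integer, Integer> gdsConvert = new HashMap<>();
    gdsConvert.put(123456789, 987654321);

    int[] recordHashes = {987654321, 123456789, 987654321}; // hashes as seen on incoming records
    Map<Integer, Integer> recordsPerGroup = new HashMap<>();

    for (int gdsHash : recordHashes) {
      Integer remapped = gdsConvert.get(gdsHash);
      if (remapped != null) gdsHash = remapped; // apply the override before the group lookup
      recordsPerGroup.merge(gdsHash, 1, Integer::sum);
    }

    System.out.println(recordsPerGroup); // prints {987654321=3}: all three records share one group
  }
}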