Example No. 1
  public static byte[] GetByteArrayFromInputStream(InputStream is) {

    try {
      int length;
      int size = 1024;
      byte[] buffer;

      if (is instanceof ByteArrayInputStream) {
        size = is.available();
        buffer = new byte[size];
        is.read(buffer, 0, size);
      } else {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        buffer = new byte[size];
        while ((length = is.read(buffer, 0, size)) != -1) {
          outputStream.write(buffer, 0, length);
        }

        buffer = outputStream.toByteArray();
      }
      return buffer;
    } catch (IOException e) {
      e.printStackTrace();
    } finally {
      try {
        is.close();
      } catch (Exception e) {
        tracer.warn(
            SessionLogcatAppender.MARKER_INTERNAL,
            "GetStringFromInputStream - could not close stream");
      }
    }

    return null;
  }
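A minimal usage sketch for the helper above, assuming it lives in the same class and the usual java.io imports are present; the wrapper method name and the file path are illustrative only:

  // Hypothetical caller: read a local file into a byte array via the helper above.
  public static byte[] readFileBytes(String path) throws IOException {
    try (InputStream in = new FileInputStream(path)) { // path is illustrative only
      // The helper also closes the stream itself; closing an already-closed FileInputStream is harmless.
      return GetByteArrayFromInputStream(in);
    }
  }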
Example No. 2
  /**
   * Loops through an input stream and converts it into a string, then closes the input stream.
   * Note that line separators are dropped, because BufferedReader.readLine() strips them.
   *
   * @param is the input stream to read
   * @return the contents of the stream as a single string
   */
  public static String GetStringFromInputStream(InputStream is) {
    String line;
    StringBuilder total = new StringBuilder();

    // Wrap a BufferedReader around the InputStream
    BufferedReader rd = new BufferedReader(new InputStreamReader(is));

    // Read response until the end
    try {
      while ((line = rd.readLine()) != null) {
        total.append(line);
      }
    } catch (IOException e) {
      e.printStackTrace();
    } finally {
      try {
        is.close();
      } catch (Exception e) {
        tracer.warn(
            SessionLogcatAppender.MARKER_INTERNAL,
            "GetStringFromInputStream - could not close stream");
      }
    }

    // Return full string
    return total.toString();
  }
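A hedged usage sketch for the helper above, assuming it sits in the same class; the anchor class and resource name are hypothetical and only the call pattern is shown:

  // Hypothetical caller: load a classpath resource as a String via the helper above.
  public static String readResourceAsString(Class<?> anchor, String resourceName) {
    InputStream in = anchor.getResourceAsStream(resourceName); // may be null if the resource is missing
    return (in == null) ? null : GetStringFromInputStream(in); // the helper closes the stream
  }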
Example No. 3
  // scan has been done, create FmrcInv
  private FmrcInv makeFmrcInv(Formatter debug) throws IOException {
    try {
      Map<CalendarDate, FmrInv> fmrMap =
          new HashMap<CalendarDate, FmrInv>(); // all files are grouped by run date in an FmrInv
      List<FmrInv> fmrList = new ArrayList<FmrInv>(); // an fmrc is a collection of fmr

      // get the inventory, sorted by path
      for (MFile f : manager.getFiles()) {
        if (logger.isDebugEnabled()) logger.debug("Fmrc: " + config.spec + ": file=" + f.getPath());

        GridDatasetInv inv = null;
        try {
          inv =
              GridDatasetInv.open(
                  manager, f, config.innerNcml); // inventory is discovered for each GDS
        } catch (IOException ioe) {
          logger.warn("Error opening " + f.getPath() + "(skipped)", ioe);
          continue; // skip
        }

        CalendarDate runDate = inv.getRunDate();
        if (debug != null)
          debug.format("  opened %s rundate = %s%n", f.getPath(), inv.getRunDateString());

        // add to fmr for that rundate
        FmrInv fmr = fmrMap.get(runDate);
        if (fmr == null) {
          fmr = new FmrInv(runDate);
          fmrMap.put(runDate, fmr);
          fmrList.add(fmr);
        }
        fmr.addDataset(inv, debug);
      }
      if (debug != null) debug.format("%n");

      // finish the FmrInv
      Collections.sort(fmrList);
      for (FmrInv fmr : fmrList) {
        fmr.finish();
        if (logger.isDebugEnabled())
          logger.debug(
              "Fmrc: spec="
                  + config.spec
                  + ": fmr rundate="
                  + fmr.getRunDate()
                  + " nfiles= "
                  + fmr.getFiles().size());
      }

      return new FmrcInv(
          "fmrc:" + manager.getCollectionName(), fmrList, config.fmrcConfig.regularize);

    } catch (Throwable t) {
      logger.error("makeFmrcInv", t);
      throw new RuntimeException(t);
    }
  }
Example No. 4
  public PlantData addPlant(Plant newPlant) {
    if (newPlant != null) {
      newPlant.save();
      this.plants.add(newPlant);
      this.save();
    } else {
      logger.warn("newPLant was NULL");
      System.out.println("newPLant was NULL");
    }

    return this;
  }
 public void actionRefresh(CommandRequest request) {
   try {
     String timeOutValue = request.getRequestObject().getParameter("refreshTimeOut");
     if (!StringUtils.isBlank(timeOutValue)) {
       try {
         autoRefreshTimeout = Integer.decode(timeOutValue).intValue();
       } catch (NumberFormatException e) {
         log.warn("Cannot parse auto refresh value as a number.");
       }
     }
     getDashboard().refresh();
   } catch (Exception e) {
     log.error("Cannot refresh dashboard.", e);
   }
 }
Example No. 6
  /**
   * Get the parameter with id <tt>id</tt>.
   *
   * @param id the parameter id
   * @return the GridParameter
   */
  public GridParameter getParameter(int id) {
    GridParameter p = parameters.get(Integer.toString(id));
    if (p != null) return p;

    logger.warn(
        "GribPDSParamTable: Could not find parameter "
            + id
            + " for center:"
            + center_id
            + " subcenter:"
            + subcenter_id
            + " number:"
            + table_number
            + " table "
            + filename);
    String unknown = "UnknownParameter_" + Integer.toString(id) + "_table_" + filename;
    return new GridParameter(id, unknown, unknown, "Unknown");
  }
Example No. 7
 private <T> T executionHandler(Callable<T> action) {
   int connectionAttempts = MAX_CONNECTION_ATTEMPTS;
   while (connectionAttempts > 0) {
     try {
       checkConnection();
       return action.call();
     } catch (FTPConnectionClosedException e) {
       LOGGER.debug("Exception", e);
       if (reconnect) {
         LOGGER.warn("Server closed connection. Reconnecting.");
         connectionAttempts--;
         connect();
       }
     } catch (Exception e) {
       LOGGER.debug("Exception", e);
       throw new VirtualFileException(e);
     }
   }
   return null;
 }
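A sketch of how the retry wrapper above might be invoked; the ftpClient field and the surrounding class are assumptions made for illustration, not part of the original snippet:

 // Hypothetical call site: run one FTP operation through the retry wrapper above.
 // Assumes an org.apache.commons.net.ftp.FTPClient field named ftpClient exists in this class.
 public String currentDirectory() {
   return executionHandler(() -> ftpClient.printWorkingDirectory());
 }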
Example No. 8
 /**
  * Reads the content of a comma-separated file (.csv, .input, .txt).
  *
  * @param CSV the comma-separated file to read.
  * @param noHeaders if true, skip the first line (the header row).
  * @return the file content as a List of String arrays (one per row), or null if the file cannot
  *     be read.
  */
 public static List<String[]> parseCSVFileAsList(File CSV, boolean noHeaders) {
   List<String[]> content;
   // try-with-resources so the reader is always closed, even on error
   try (CSVReader reader1 = new CSVReader(new FileReader(CSV))) {
     content = reader1.readAll();
     /* List<String[]> myDatas = reader1.readAll();
     String[] lineI = myDatas.get(i);
     for (String[] line : myDatas) {
         for (String value : line) {
             //do stuff with value
         }
     }*/
     if (noHeaders) content.remove(0);
     if (!content.isEmpty() && content.get(0).length <= 1) {
       logger.warn(
           "Attention: the CSV file may not have been parsed correctly with the OpenCSV API; try the Univocity method instead");
     }
     return content;
   } catch (IOException e) {
     logger.error("Can't find the CSV File:" + e.getMessage(), e);
     return null;
   }
 }
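A small hypothetical caller for the parser above, assuming the usual java.io and java.util imports; the path and the delimiter used for printing are illustrative only:

 // Hypothetical caller: print every data row of a CSV file using the parser above.
 public static void printCsv(String path) {
   List<String[]> rows = parseCSVFileAsList(new File(path), true); // true = skip the header row
   if (rows == null) return; // the parser returns null when the file cannot be read
   for (String[] row : rows) {
     System.out.println(String.join(";", row));
   }
 }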
Example No. 9
 /**
  * Uses the OpenCSV library to parse a CSV file into a list of beans.
  *
  * @param clazz the Class of the bean.
  * @param fileInputCsv the CSV file to parse.
  * @param separator the separator character.
  * @param <T> the bean type.
  * @return the List of beans parsed from the CSV file, or an empty list if the file cannot be
  *     parsed.
  */
 public static <T> List<T> parseCSVFileAsList(Class<T> clazz, File fileInputCsv, char separator) {
   // try-with-resources: the CSVReader (and its underlying FileReader) is closed automatically
   try (CSVReader reader = new CSVReader(new FileReader(fileInputCsv), separator)) {
     List<T> beans = new ArrayList<>();
     // skip header row (the header names drive which setters are invoked)
     String[] headers = reader.readNext();
     // read content line by line
     String[] record;
     while ((record = reader.readNext()) != null) {
       T t = ReflectionUtilities.invokeConstructor(clazz);
       for (int i = 0; i < record.length; i++) {
         String nameMethod = "set" + org.apache.commons.lang3.StringUtils.capitalize(headers[i]);
         // invoke setter method
         if (ReflectionUtilities.checkMethod(clazz, nameMethod)) {
           ReflectionUtilities.invokeSetter(t, nameMethod, record[i]);
         } else {
           logger.warn(
               "No setter method named "
                   + nameMethod
                   + " exists on the bean "
                   + t.getClass().getName());
         }
       }
       beans.add(t);
     }
     return beans;
   } catch (IOException e) {
     logger.error(
         "Can't parse the CSV file:" + fileInputCsv.getAbsolutePath() + " -> " + e.getMessage(),
         e);
     return new ArrayList<>();
   }
 }
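A hedged usage sketch for the bean parser above; the Person class, its setter names, the file path, and the ';' separator are hypothetical and only illustrate how the pieces fit together:

 // Hypothetical usage: a CSV header line "name;age" would map to setName(...) and setAge(...)
 // on a hypothetical Person bean that has a no-argument constructor.
 public static List<Person> loadPeople(String path) {
   return parseCSVFileAsList(Person.class, new File(path), ';');
 }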
Example No. 10
  /**
   * Add levels
   *
   * @param records GridRecords, one for each level
   */
  void addLevels(List<GridRecord> records) {
    for (int i = 0; i < records.size(); i++) {
      GridRecord record = records.get(i);

      if (coordIndex(record) < 0) {
        levels.add(new LevelCoord(record.getLevel1(), record.getLevel2()));
        if (dontUseVertical && (levels.size() > 1)) {
          if (GridServiceProvider.debugVert) {
            logger.warn(
                "GribCoordSys: unused level coordinate has > 1 levels = "
                    + levelName
                    + " "
                    + record.getLevelType1()
                    + " "
                    + levels.size());
          }
        }
      }
    }
    Collections.sort(levels);
    if (positive.equals("down")) {
      Collections.reverse(levels);
    }
  }
Example No. 11
  @Override
  public void run() {
    CSWTransactionHelper helper;
    Map<String, String> wpsOutputMap = Maps.newHashMap();
    ReturnInfo info;
    RunMetadata metaObj = RunMetadata.getInstance(metadata);
    String compReq;
    String repo = props.getProperty("watersmart.sos.model.repo");
    String netCDFFailMessage = "NetCDF failed unexpectedly ";
    String cswResponse;
    UUID uuid = UUID.randomUUID();

    // -- Start here if we have a file and no SOS endpoint...
    // 1. Create NetCDF file
    // 2. Add results from NetCDF creation to CSW record
    // 3. Wait for THREDDS
    // -- Start here if we have an SOS endpoint
    // 4. Run the compare stats WPS process
    // 5. Add results from WPS process to CSW record

    if (StringUtils.isBlank(sosEndpoint)) {
      // SOS is blank which means we have to create it. Otherwise, the typical use
      // case is that the client is requesting a re-run

      // 1. Create NetCDF file
      try {
        log.debug("Creating NetCDF file");
        // CreateDSGFromZip.create() seems to cause a lot of grief. We keep getting:
        // java.lang.UnsatisfiedLinkError: Native Library
        // ${application_path}/loader/com/sun/jna/linux-amd64/libnetcdf.so already loaded in another
        // classloader
        // When developing and I see this, I have to restart the server and redeploy the project
        // The fault happens at
        // gov.usgs.cida.jna.NetCDFJNAInitializer.contextDestroyed(NetCDFJNAInitializer.java:21)
        info = CreateDSGFromZip.create(zipLocation, metaObj);
        if (info != null && info.properties != null) {
          netcdfSuccessful = true;
        } else {
          log.error(netCDFFailMessage);
          sendFailedEmail(new RuntimeException(netCDFFailMessage));
          throw new IOException("Output from NetCDF creation process was null");
        }
      } catch (IOException | XMLStreamException | RuntimeException ex) {
        log.error(netCDFFailMessage, ex);
        sendFailedEmail(new RuntimeException(netCDFFailMessage));
        return;
      }

      // The NetCDF process has passed so create a CSW record for the run and
      // insert what we have so far.
      // The WPS output will be updated once the process succeeds/fails.  The UI
      // will show "Process not yet completed" in the meantime.
      sosEndpoint = repo + metaObj.getTypeString() + "/" + info.filename;
      wpsOutputMap.put(WPSImpl.stats_compare, "");
      helper = new CSWTransactionHelper(metaObj, sosEndpoint, wpsOutputMap);
      // 2. Add results from NetCDF creation to CSW record
      try {
        log.debug("Adding results from NetCDF creation to CSW record");
        cswResponse = helper.addServiceIdentification();
        if (cswResponse != null) {
          cswTransSuccessful = true;
        } else {
          cswTransSuccessful = false;
          throw new IOException("Unable to update CSW Record");
        }
      } catch (Exception ex) {
        log.error("Failed to perform CSW insert", ex);
        sendFailedEmail(ex);
        return;
      }

      // 3. Wait for THREDDS
      try {
        log.debug("Beginning THREDDS wait period");
        Thread.sleep(SLEEP_FOR_THREDDS);
      } catch (InterruptedException ex) {
        // Typically we don't care about this, but we can log and move on.
        log.warn("THREDDS wait period was interrupted.");
        // If anything needs to be handled on an interruption, handle it here
      }
      log.trace("End of THREDDS wait period");
    }

    // 4. Run the compare stats using the R-WPS package
    try {
      log.debug("Sending request for compare stats");
      compReq = WPSImpl.createCompareStatsRequest(sosEndpoint);
      String algorithmOutput = runNamedAlgorithm("compare", compReq, uuid, metaObj);
      wpsOutputMap.put(WPSImpl.stats_compare, algorithmOutput);
    } catch (Exception ex) {
      log.error("Failed to run WPS algorithm", ex);
      sendFailedEmail(ex);
      return;
    }

    // 5. Add results from WPS process to CSW record
    if (wpsOutputMap.get(WPSImpl.stats_compare) != null) {
      log.debug("Stats compare completed successfully");
      rStatsSuccessful = true;
      helper = new CSWTransactionHelper(metaObj, sosEndpoint, wpsOutputMap);
      try {
        cswResponse = helper.updateRunMetadata(metaObj);
        cswTransSuccessful = cswResponse != null;
        sendCompleteEmail(wpsOutputMap);
      } catch (IOException | URISyntaxException ex) {
        log.error("Failed to perform CSW update", ex);
        sendFailedEmail(ex);
      }
    } else {
      log.error("Stats compare failed");
      sendFailedEmail(new Exception("Failed to run WPS algorithm"));
    }
  }
  private boolean testScan(String name, ArrayList group) {
    int datatype = name.equals("reflect") ? Level2Record.REFLECTIVITY : Level2Record.VELOCITY_HI;
    Level2Record first = (Level2Record) group.get(0);

    int n = group.size();
    if (debugScans) {
      boolean hasBoth = first.hasDopplerData && first.hasReflectData;
      System.out.println(
          name
              + " "
              + first
              + " has "
              + n
              + " radials resolution= "
              + first.resolution
              + " has both = "
              + hasBoth);
    }

    boolean ok = true;
    double sum = 0.0;
    double sum2 = 0.0;

    for (int i = 0; i < MAX_RADIAL; i++) radial[i] = 0;

    for (int i = 0; i < group.size(); i++) {
      Level2Record r = (Level2Record) group.get(i);

      /* this appears to be common - seems to be ok, we put missing values in
      if (r.getGateCount(datatype) != first.getGateCount(datatype)) {
        log.error(raf.getLocation()+" different number of gates ("+r.getGateCount(datatype)+
                "!="+first.getGateCount(datatype)+") in record "+name+ " "+r);
        ok = false;
      } */

      if (r.getGateSize(datatype) != first.getGateSize(datatype)) {
        log.warn(
            raf.getLocation()
                + " different gate size ("
                + r.getGateSize(datatype)
                + ") in record "
                + name
                + " "
                + r);
        ok = false;
      }
      if (r.getGateStart(datatype) != first.getGateStart(datatype)) {
        log.warn(
            raf.getLocation()
                + " different gate start ("
                + r.getGateStart(datatype)
                + ") in record "
                + name
                + " "
                + r);
        ok = false;
      }
      if (r.resolution != first.resolution) {
        log.warn(
            raf.getLocation()
                + " different resolution ("
                + r.resolution
                + ") in record "
                + name
                + " "
                + r);
        ok = false;
      }

      if ((r.radial_num < 0) || (r.radial_num >= MAX_RADIAL)) {
        log.info(
            raf.getLocation()
                + " radial out of range= "
                + r.radial_num
                + " in record "
                + name
                + " "
                + r);
        continue;
      }
      if (radial[r.radial_num] > 0) {
        log.warn(
            raf.getLocation()
                + " duplicate radial = "
                + r.radial_num
                + " in record "
                + name
                + " "
                + r);
        ok = false;
      }
      radial[r.radial_num] = r.recno + 1;

      sum += r.getElevation();
      sum2 += r.getElevation() * r.getElevation();
      // System.out.println("  elev="+r.getElevation()+" azi="+r.getAzimuth());
    }

    for (int i = 1; i < radial.length; i++) {
      if (0 == radial[i]) {
        if (n != (i - 1)) {
          log.warn(" missing radial(s)");
          ok = false;
        }
        break;
      }
    }

    double avg = sum / n;
    double sd = Math.sqrt((n * sum2 - sum * sum) / (n * (n - 1)));
    // System.out.println(" avg elev="+avg+" std.dev="+sd);

    return ok;
  }
Example No. 13
  public boolean readIndex(String filename, long gribLastModified, CollectionManager.Force force)
      throws IOException {
    File idxFile = GribCollection.getIndexFile(filename + GBX9_IDX);
    if (!idxFile.exists()) return false;
    long idxModified = idxFile.lastModified();
    if ((force != CollectionManager.Force.nocheck) && (idxModified < gribLastModified))
      return false;
    // force new index if file was updated

    FileInputStream fin =
        new FileInputStream(idxFile); // LOOK need DiskCache for non-writeable directories

    try {
      //// check header is ok
      if (!NcStream.readAndTest(fin, MAGIC_START.getBytes())) {
        log.info("Bad magic number of grib index, on file" + idxFile);
        return false;
      }

      int v = NcStream.readVInt(fin);
      if (v != version) {
        if ((v == 0) || (v > version))
          throw new IOException(
              "Grib1Index found version " + v + ", want version " + version + " on " + filename);
        if (log.isDebugEnabled())
          log.debug(
              "Grib1Index found version " + v + ", want version " + version + " on " + filename);
        return false;
      }

      int size = NcStream.readVInt(fin);
      if (size <= 0 || size > 100 * 1000 * 1000) { // try to catch garbage
        log.warn("Grib1Index bad size = {} for {} ", size, filename);
        return false;
      }

      byte[] m = new byte[size];
      NcStream.readFully(fin, m);

      Grib1IndexProto.Grib1Index proto = Grib1IndexProto.Grib1Index.parseFrom(m);
      String fname = proto.getFilename();
      if (debug) System.out.printf("%s for %s%n", fname, filename);

      gdsList = new ArrayList<Grib1SectionGridDefinition>(proto.getGdsListCount());
      for (Grib1IndexProto.Grib1GdsSection pgds : proto.getGdsListList()) {
        Grib1SectionGridDefinition gds = readGds(pgds);
        gdsList.add(gds);
      }
      if (debug) System.out.printf(" read %d gds%n", gdsList.size());

      records = new ArrayList<Grib1Record>(proto.getRecordsCount());
      for (Grib1IndexProto.Grib1Record precord : proto.getRecordsList()) {
        records.add(readRecord(precord));
      }
      if (debug) System.out.printf(" read %d records%n", records.size());

    } catch (java.lang.NegativeArraySizeException e) {
      log.error("GribIndex failed on " + filename, e);
      return false;

    } catch (IOException e) {
      log.error("GribIndex failed on " + filename, e);
      return false;

    } finally {
      fin.close();
    }

    return true;
  }
  // do we have same characteristics for all groups in a variable?
  private boolean testVariable(String name, List scans) {
    int datatype = name.equals("reflect") ? Level2Record.REFLECTIVITY : Level2Record.VELOCITY_HI;
    if (scans.size() == 0) {
      log.warn(" No data for = " + name);
      return false;
    }

    boolean ok = true;
    List firstScan = (List) scans.get(0);
    Level2Record firstRecord = (Level2Record) firstScan.get(0);
    dopplarResolution = firstRecord.resolution;

    if (debugGroups2)
      System.out.println(
          "Group "
              + Level2Record.getDatatypeName(datatype)
              + " ngates = "
              + firstRecord.getGateCount(datatype)
              + " start = "
              + firstRecord.getGateStart(datatype)
              + " size = "
              + firstRecord.getGateSize(datatype));

    for (int i = 1; i < scans.size(); i++) {
      List scan = (List) scans.get(i);
      Level2Record record = (Level2Record) scan.get(0);

      if ((datatype == Level2Record.VELOCITY_HI)
          && (record.resolution
              != firstRecord.resolution)) { // do all velocity resolutions match ??
        log.warn(
            name
                + " scan "
                + i
                + " diff resolutions = "
                + record.resolution
                + ", "
                + firstRecord.resolution
                + " elev= "
                + record.elevation_num
                + " "
                + record.getElevation());
        ok = false;
        hasDifferentDopplarResolutions = true;
      }

      if (record.getGateSize(datatype) != firstRecord.getGateSize(datatype)) {
        log.warn(
            name
                + " scan "
                + i
                + " diff gates size = "
                + record.getGateSize(datatype)
                + " "
                + firstRecord.getGateSize(datatype)
                + " elev= "
                + record.elevation_num
                + " "
                + record.getElevation());
        ok = false;

      } else if (debugGroups2)
        System.out.println(
            " ok gates size elev= " + record.elevation_num + " " + record.getElevation());

      if (record.getGateStart(datatype) != firstRecord.getGateStart(datatype)) {
        log.warn(
            name
                + " scan "
                + i
                + " diff gates start = "
                + record.getGateStart(datatype)
                + " "
                + firstRecord.getGateStart(datatype)
                + " elev= "
                + record.elevation_num
                + " "
                + record.getElevation());
        ok = false;

      } else if (debugGroups2)
        System.out.println(
            " ok gates start elev= " + record.elevation_num + " " + record.getElevation());

      if (record.message_type == 31) {
        hasHighResolutionData = true;
        // each data type
        if (record.hasHighResREFData) hasHighResolutionREF = true;
        if (record.hasHighResVELData) hasHighResolutionVEL = true;
        if (record.hasHighResSWData) hasHighResolutionSW = true;
        if (record.hasHighResZDRData) hasHighResolutionZDR = true;
        if (record.hasHighResPHIData) hasHighResolutionPHI = true;
        if (record.hasHighResRHOData) hasHighResolutionRHO = true;
      }
    }

    return ok;
  }
  public CommandResponse actionFilter(CommandRequest request) {
    try {
      // Initialize attributes before applying the filter.
      filterPropertyErrors.clear();

      // Parse parameters and set the filter.
      Iterator visiblePropertiesIt = properties.iterator();
      while (visiblePropertiesIt.hasNext()) {
        DashboardFilterProperty dashboardFilterProperty =
            (DashboardFilterProperty) visiblePropertiesIt.next();

        // If the property is already part of the dashboard filter, it cannot be filtered on
        // again, so skip it.
        if (dashboardFilterProperty.isBeingFiltered()) continue;
        if (!dashboardFilterProperty.isPropertyAlive()) {
          log.warn(
              "Trying to filter by "
                  + dashboardFilterProperty.getPropertyId()
                  + ". This property is not in any dataset.");
          continue;
        }
        if (!dashboardFilterProperty.isVisible()) continue;

        Object[] result;
        try {
          result =
              requestProcessor.parseDashboardProperty(
                  request.getRequestObject(), dashboardFilterProperty);
        } catch (Exception e) {
          log.error("Error parsing property " + dashboardFilterProperty.getPropertyId() + ".", e);
          continue;
        }

        if (result.length != 3) {
          log.error(
              "Error parsing property: '"
                  + dashboardFilterProperty.getPropertyId()
                  + "' for dataProvider: '"
                  + dashboardFilterProperty.getDataProviderCode()
                  + "'");
          continue;
        }

        Collection allowedValues = (Collection) result[0];
        Object minValue = result[1];
        Object maxValue = result[2];
        if (allowedValues == null && minValue == null && maxValue == null) continue;

        // Set the filter with this property.
        Dashboard currentDashboard = DashboardHandler.lookup().getCurrentDashboard();
        if (currentDashboard.filter(
            dashboardFilterProperty.getPropertyId(),
            minValue,
            true,
            maxValue,
            true,
            allowedValues,
            FilterByCriteria.ALLOW_ANY)) {
          return new ShowCurrentScreenResponse();
        }
      }
    } catch (Exception e) {
      log.error("Error trying to filter properties for dashboard", e);
    }
    return null;
  }
  /**
   * Write an equivalent uncompressed version of the file.
   *
   * @param inputRaf file to uncompress
   * @param ufilename write to this file
   * @return raf of uncompressed file
   * @throws IOException on read error
   */
  private RandomAccessFile uncompress(RandomAccessFile inputRaf, String ufilename)
      throws IOException {
    RandomAccessFile outputRaf = new RandomAccessFile(ufilename, "rw");
    FileLock lock = null;

    while (true) { // loop waiting for the lock
      try {
        lock = outputRaf.getRandomAccessFile().getChannel().lock(0, 1, false);
        break;

      } catch (OverlappingFileLockException oe) { // not sure why lock() doesn't block
        try {
          Thread.sleep(100); // msecs
        } catch (InterruptedException e1) {
        }
      }
    }

    try {
      inputRaf.seek(0);
      byte[] header = new byte[Level2Record.FILE_HEADER_SIZE];
      inputRaf.read(header);
      outputRaf.write(header);

      boolean eof = false;
      int numCompBytes;
      byte[] ubuff = new byte[40000];
      byte[] obuff = new byte[40000];
      try {
        CBZip2InputStream cbzip2 = new CBZip2InputStream();
        while (!eof) {
          try {
            numCompBytes = inputRaf.readInt();
            if (numCompBytes == -1) {
              if (log.isDebugEnabled()) log.debug("  done: numCompBytes=-1 ");
              break;
            }
          } catch (EOFException ee) {
            log.warn("  got EOFException ");
            break; // assume this is ok
          }

          if (log.isDebugEnabled()) {
            log.debug(
                "reading compressed bytes "
                    + numCompBytes
                    + " input starts at "
                    + inputRaf.getFilePointer()
                    + "; output starts at "
                    + outputRaf.getFilePointer());
          }
          /*
           * For some stupid reason, the last block seems to
           * have the number of bytes negated.  So, we just
           * assume that any negative number (other than -1)
           * is the last block and go on our merry little way.
           */
          if (numCompBytes < 0) {
            if (log.isDebugEnabled()) log.debug("last block?" + numCompBytes);
            numCompBytes = -numCompBytes;
            eof = true;
          }
          byte[] buf = new byte[numCompBytes];
          inputRaf.readFully(buf);
          ByteArrayInputStream bis = new ByteArrayInputStream(buf, 2, numCompBytes - 2);

          // CBZip2InputStream cbzip2 = new CBZip2InputStream(bis);
          cbzip2.setStream(bis);
          int total = 0;
          int nread;
          /*
          while ((nread = cbzip2.read(ubuff)) != -1) {
            dout2.write(ubuff, 0, nread);
            total += nread;
          }
          */
          try {
            while ((nread = cbzip2.read(ubuff)) != -1) {
              if (total + nread > obuff.length) {
                byte[] temp = obuff;
                obuff = new byte[temp.length * 2];
                System.arraycopy(temp, 0, obuff, 0, temp.length);
              }
              System.arraycopy(ubuff, 0, obuff, total, nread);
              total += nread;
            }
            if (obuff.length >= 0) outputRaf.write(obuff, 0, total);
          } catch (BZip2ReadException ioe) {
            log.warn("Nexrad2IOSP.uncompress ", ioe);
          }
          float nrecords = (float) (total / 2432.0);
          if (log.isDebugEnabled())
            log.debug(
                "  unpacked "
                    + total
                    + " num bytes "
                    + nrecords
                    + " records; ouput ends at "
                    + outputRaf.getFilePointer());
        }

      } catch (Exception e) {
        if (outputRaf != null) outputRaf.close();
        outputRaf = null;

        // don't leave bad files around
        File ufile = new File(ufilename);
        if (ufile.exists()) {
          if (!ufile.delete())
            log.warn("failed to delete uncompressed file (IOException) " + ufilename);
        }

        if (e instanceof IOException) throw (IOException) e;
        else throw new RuntimeException(e);
      }

    } finally {
      if (null != outputRaf) outputRaf.flush();
      if (lock != null) lock.release();
    }

    return outputRaf;
  }
  // read all records in all files,
  // divide into groups based on GDS hash
  // each group has an arraylist of all records that belong to it.
  // for each group, run the rectilyser to derive the coordinates and variables
  public List<Group> makeAggregatedGroups(
      List<String> filenames, CollectionManager.Force force, Formatter f) throws IOException {
    Map<Integer, Group> gdsMap = new HashMap<Integer, Group>();
    boolean intvMerge = mergeIntvDefault;

    f.format("GribCollection %s: makeAggregatedGroups%n", gc.getName());
    int total = 0;
    int fileno = 0;

    for (CollectionManager dcm : collections) {
      f.format(" dcm= %s%n", dcm);
      FeatureCollectionConfig.GribConfig config =
          (FeatureCollectionConfig.GribConfig)
              dcm.getAuxInfo(FeatureCollectionConfig.AUX_GRIB_CONFIG);
      Map<Integer, Integer> gdsConvert = (config != null) ? config.gdsHash : null;
      FeatureCollectionConfig.GribIntvFilter intvMap = (config != null) ? config.intvFilter : null;
      intvMerge =
          (config == null) || (config.intvMerge == null) ? mergeIntvDefault : config.intvMerge;

      for (MFile mfile : dcm.getFiles()) {
        // f.format("%3d: %s%n", fileno, mfile.getPath());
        filenames.add(mfile.getPath());

        Grib2Index index = null;
        try {
          index =
              (Grib2Index)
                  GribIndex.readOrCreateIndexFromSingleFile(
                      false, !isSingleFile, mfile, config, force, f);

        } catch (IOException ioe) {
          logger.warn(
              "GribCollectionBuilder {}: reading/Creating gbx9 index failed err={}",
              gc.getName(),
              ioe.getMessage());
          f.format(
              "GribCollectionBuilder: reading/Creating gbx9 index failed err=%s%n  skipping %s%n",
              ioe.getMessage(), mfile.getPath() + GribIndex.IDX_EXT);
          continue;
        }

        for (Grib2Record gr : index.getRecords()) {
          if (this.tables == null) {
            Grib2SectionIdentification ids =
                gr.getId(); // so all records must use the same table (!)
            this.tables =
                Grib2Customizer.factory(
                    ids.getCenter_id(),
                    ids.getSubcenter_id(),
                    ids.getMaster_table_version(),
                    ids.getLocal_table_version());
            if (config != null)
              tables.setTimeUnitConverter(
                  config
                      .getTimeUnitConverter()); // LOOK doesnt really work with multiple collections
          }
          if (intvMap != null && filterTinv(gr, intvMap, f)) continue; // skip

          gr.setFile(fileno); // each record tracks which file it belongs to
          int gdsHash =
              gr.getGDSsection().getGDS().hashCode(); // use GDS hash code to group records
          // allow external config to muck with gdsHash (works around encoding errors,
          // since we need exact hash matching)
          if (gdsConvert != null && gdsConvert.get(gdsHash) != null)
            gdsHash = (Integer) gdsConvert.get(gdsHash);

          Group g = gdsMap.get(gdsHash);
          if (g == null) {
            g = new Group(gr.getGDSsection(), gdsHash);
            gdsMap.put(gdsHash, g);
          }
          g.records.add(gr);
          total++;
        }
        fileno++;
      }
    }
    f.format(" total grib records= %d%n", total);

    Grib2Rectilyser.Counter c = new Grib2Rectilyser.Counter(); // debugging
    List<Group> result = new ArrayList<Group>(gdsMap.values());
    for (Group g : result) {
      g.rect = new Grib2Rectilyser(tables, g.records, g.gdsHash, intvMerge);
      f.format(" GDS hash %d == ", g.gdsHash);
      g.rect.make(f, c, filenames);
    }
    f.format(
        " Rectilyser: nvars=%d records unique=%d total=%d dups=%d (%f) %n",
        c.vars, c.recordsUnique, c.records, c.dups, ((float) c.dups) / c.records);

    return result;
  }
Example No. 18
  /** Read parameter table. */
  private void readParameterTable() {
    if (path == null) {
      logger.error("GribPDSParamTable: unknown path for " + this);
      return;
    }

    try {
      InputStream is = GribResourceReader.getInputStream(path);
      if (is == null) {
        logger.error("GribPDSParamTable: error getInputStream on " + this);
        return;
      }
      BufferedReader br = new BufferedReader(new InputStreamReader(is));

      // Read first line that has center, subcenter, and version of table
      String line = br.readLine();
      if (debug) System.out.println(line);
      String[] tableDefArr = line.split(":");

      /*  LOOK - why not test values ?
      center    = Integer.parseInt(tableDefArr[1].trim());
      subcenter = Integer.parseInt(tableDefArr[2].trim());
      number    = Integer.parseInt(tableDefArr[3].trim());
      if ((center != center_id) && (subcenter != subcenter_id)
              && (number != table_number)) {
          throw new java.io.IOException(
              "parameter table header values do not "
              + " match values in GRIB file.  Possible error in lookup table.");
      }
      */

      HashMap<String, GridParameter> tmpParameters =
          new HashMap<String, GridParameter>(); // thread safe - temp hash

      // rdg - added the 0 line length check to cover the case of blank lines at
      //       the end of the parameter table file.
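      // Each remaining line is expected to look like "<number>:<name>:<description> [<unit>]";
      // when no "[unit]" part is present, the description text is reused as the unit below.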
      while ((line = br.readLine()) != null) {
        if ((line.length() == 0) || line.startsWith("#")) {
          continue;
        }
        GridParameter parameter = new GridParameter();
        tableDefArr = line.split(":");
        parameter.setNumber(Integer.parseInt(tableDefArr[0].trim()));
        parameter.setName(tableDefArr[1].trim());
        // check to see if unit defined, if not, parameter is undefined
        if (tableDefArr[2].indexOf('[') == -1) {
          // Undefined unit
          parameter.setDescription(tableDefArr[2].trim());
          parameter.setUnit(tableDefArr[2].trim());
        } else {
          String[] arr2 = tableDefArr[2].split("\\[");
          parameter.setDescription(makeValidDesc(arr2[0].trim()));
          // System.out.println( "Desc ="+ parameter.getDescription());
          // Remove "]"
          parameter.setUnit(arr2[1].substring(0, arr2[1].lastIndexOf(']')).trim());
        }
        tmpParameters.put(Integer.toString(parameter.getNumber()), parameter);
        if (debug)
          System.out.println(
              parameter.getNumber() + " " + parameter.getDescription() + " " + parameter.getUnit());
      }

      this.parameters = tmpParameters; // thread safe

    } catch (IOException ioError) {
      logger.warn(
          "An error occurred in GribPDSParamTable while trying to open the parameter table "
              + filename
              + " : "
              + ioError);
    }
  }
Example No. 19
  /**
   * Looks for the parameter table which matches the center, subcenter and table version from the
   * tables array. If this is the first time asking for this table, then the parameters for this
   * table have not been read in yet, so this is done as well.
   *
   * @param center - integer from PDS octet 5, representing Center.
   * @param subcenter - integer from PDS octet 26, representing Subcenter
   * @param number - integer from PDS octet 4, representing Parameter Table Version
   * @param firstTry - Is this the first call or are we trying the wild cards
   * @return GribPDSParamTable matching center, subcenter, and number
   */
  private static GribPDSParamTable readParameterTable(
      int center, int subcenter, int number, boolean firstTry) {

    if (firstTry) wmoTable = number;

    GribPDSParamTable[] localCopy = paramTables; // thread safe

    for (GribPDSParamTable table : localCopy) {

      if (center == table.center_id) {
        if ((table.subcenter_id == -1) || (subcenter == table.subcenter_id)) {
          if (number == table.table_number) {
            // now that this table is being used, check to see if the
            //   parameters for this table have been read in yet.
            // If not, initialize table and read them in now.
            if (table.parameters == null) {
              if (!firstTry) {
                logger.warn(
                    "GribPDSParamTable: Using default table:"
                        + table.path
                        + " ("
                        + table.center_id
                        + ":"
                        + table.subcenter_id
                        + ":"
                        + table.table_number
                        + ")");
              }
              table.readParameterTable();
              // failed - maybe there's another matching entry in paramTables
              if (table.parameters == null) continue;

              // success - share the parameters with any other table that has the same path
              for (int j = 0; j < paramTables.length; j++) {
                GribPDSParamTable tab = paramTables[j];
                if (tab.path.equals(table.path)) {
                  tab.parameters = table.parameters;
                }
              }
            }
            return table;
          }
        }
      }
    }

    // Try with the wild cards
    if (number != -1) {
      return readParameterTable(center, subcenter, -1, false);

    } else if (subcenter != -1) {
      logger.warn(
          "GribPDSParamTable: Could not find table for center:"
              + center
              + " subcenter:"
              + subcenter
              + " number:"
              + wmoTable);
      return readParameterTable(center, -1, -1, false);

    } else if (center != -1) {
      // return readParameterTable(-1, -1, -1, false);
      return readParameterTable(-1, -1, wmoTable, false);
    }

    return null;
  }
Example No. 20
  public boolean readIndex(RandomAccessFile raf) {
    gc.setRaf(raf); // LOOK leaving the raf open in the GribCollection
    try {
      raf.order(RandomAccessFile.BIG_ENDIAN);
      raf.seek(0);

      //// header message
      if (!NcStream.readAndTest(raf, MAGIC_START.getBytes())) {
        logger.error("GribCollection {}: invalid index", gc.getName());
        return false;
      }

      int v = raf.readInt();
      if (v != getVersion()) {
        logger.warn(
            "GribCollection {}: index found version={}, want version= {} on file {}",
            new Object[] {gc.getName(), v, version, raf.getLocation()});
        return false;
      }

      long skip = raf.readLong();
      raf.skipBytes(skip);

      int size = NcStream.readVInt(raf);
      if ((size < 0) || (size > 100 * 1000 * 1000)) {
        logger.warn("GribCollection {}: invalid index ", gc.getName());
        return false;
      }

      byte[] m = new byte[size];
      raf.readFully(m);

      GribCollectionProto.GribCollectionIndex proto =
          GribCollectionProto.GribCollectionIndex.parseFrom(m);

      gc.center = proto.getCenter();
      gc.subcenter = proto.getSubcenter();
      gc.master = proto.getMaster();
      gc.local = proto.getLocal();
      gc.genProcessType = proto.getGenProcessType();
      gc.genProcessId = proto.getGenProcessId();
      gc.backProcessId = proto.getBackProcessId();
      gc.local = proto.getLocal();
      // gc.tables = Grib2Tables.factory(gc.center, gc.subcenter, gc.master, gc.local);

      gc.filenames = new ArrayList<String>(proto.getFilesCount());
      for (int i = 0; i < proto.getFilesCount(); i++) gc.filenames.add(proto.getFiles(i));

      // error condition on a GribCollection Index
      if ((proto.getFilesCount() == 0) && !(this instanceof TimePartitionBuilder)) {
        logger.warn("GribCollection {}: has no files, force recreate ", gc.getName());
        return false;
      }

      gc.groups = new ArrayList<GribCollection.GroupHcs>(proto.getGroupsCount());
      for (int i = 0; i < proto.getGroupsCount(); i++)
        gc.groups.add(readGroup(proto.getGroups(i), gc.makeGroup()));
      Collections.sort(gc.groups);

      gc.params = new ArrayList<Parameter>(proto.getParamsCount());
      for (int i = 0; i < proto.getParamsCount(); i++) gc.params.add(readParam(proto.getParams(i)));

      if (!readPartitions(proto)) {
        logger.warn("TimePartition {}: has no partitions, force recreate ", gc.getName());
        return false;
      }

      return true;

    } catch (Throwable t) {
      logger.error("Error reading index " + raf.getLocation(), t);
      return false;
    }
  }
Example No. 21
  private void makeCoordinateDataWithMissing(
      int datatype,
      Variable time,
      Variable elev,
      Variable azi,
      Variable nradialsVar,
      Variable ngatesVar,
      List groups) {

    Array timeData = Array.factory(time.getDataType().getPrimitiveClassType(), time.getShape());
    Index timeIndex = timeData.getIndex();

    Array elevData = Array.factory(elev.getDataType().getPrimitiveClassType(), elev.getShape());
    Index elevIndex = elevData.getIndex();

    Array aziData = Array.factory(azi.getDataType().getPrimitiveClassType(), azi.getShape());
    Index aziIndex = aziData.getIndex();

    Array nradialsData =
        Array.factory(nradialsVar.getDataType().getPrimitiveClassType(), nradialsVar.getShape());
    IndexIterator nradialsIter = nradialsData.getIndexIterator();

    Array ngatesData =
        Array.factory(ngatesVar.getDataType().getPrimitiveClassType(), ngatesVar.getShape());
    IndexIterator ngatesIter = ngatesData.getIndexIterator();

    // first fill with missing data
    IndexIterator ii = timeData.getIndexIterator();
    while (ii.hasNext()) ii.setIntNext(MISSING_INT);

    ii = elevData.getIndexIterator();
    while (ii.hasNext()) ii.setFloatNext(MISSING_FLOAT);

    ii = aziData.getIndexIterator();
    while (ii.hasNext()) ii.setFloatNext(MISSING_FLOAT);

    // now set the  coordinate variables from the Cinrad2Record radial
    int last_msecs = Integer.MIN_VALUE;
    int nscans = groups.size();
    try {
      for (int scan = 0; scan < nscans; scan++) {
        List scanGroup = (List) groups.get(scan);
        int nradials = scanGroup.size();

        Cinrad2Record first = null;
        for (int j = 0; j < nradials; j++) {
          Cinrad2Record r = (Cinrad2Record) scanGroup.get(j);
          if (first == null) first = r;

          int radial = r.radial_num - 1;
          timeData.setInt(timeIndex.set(scan, radial), r.data_msecs);
          elevData.setFloat(elevIndex.set(scan, radial), r.getElevation());
          aziData.setFloat(aziIndex.set(scan, radial), r.getAzimuth());

          if (r.data_msecs < last_msecs)
            logger.warn("makeCoordinateData time out of order " + r.data_msecs);
          last_msecs = r.data_msecs;
        }

        nradialsIter.setIntNext(nradials);
        ngatesIter.setIntNext(first.getGateCount(datatype));
      }
    } catch (java.lang.ArrayIndexOutOfBoundsException ae) {
      logger.debug("Cinrad2IOSP.uncompress ", ae);
    }
    time.setCachedData(timeData, false);
    elev.setCachedData(elevData, false);
    azi.setCachedData(aziData, false);
    nradialsVar.setCachedData(nradialsData, false);
    ngatesVar.setCachedData(ngatesData, false);
  }
Example No. 22
  private void makeCoordinateData(
      int datatype,
      Variable time,
      Variable elev,
      Variable azi,
      Variable nradialsVar,
      Variable ngatesVar,
      List groups) {

    Array timeData = Array.factory(time.getDataType().getPrimitiveClassType(), time.getShape());
    IndexIterator timeDataIter = timeData.getIndexIterator();

    Array elevData = Array.factory(elev.getDataType().getPrimitiveClassType(), elev.getShape());
    IndexIterator elevDataIter = elevData.getIndexIterator();

    Array aziData = Array.factory(azi.getDataType().getPrimitiveClassType(), azi.getShape());
    IndexIterator aziDataIter = aziData.getIndexIterator();

    Array nradialsData =
        Array.factory(nradialsVar.getDataType().getPrimitiveClassType(), nradialsVar.getShape());
    IndexIterator nradialsIter = nradialsData.getIndexIterator();

    Array ngatesData =
        Array.factory(ngatesVar.getDataType().getPrimitiveClassType(), ngatesVar.getShape());
    IndexIterator ngatesIter = ngatesData.getIndexIterator();

    int last_msecs = Integer.MIN_VALUE;
    int nscans = groups.size();
    int maxRadials = volScan.getMaxRadials();
    for (int i = 0; i < nscans; i++) {
      List scanGroup = (List) groups.get(i);
      int nradials = scanGroup.size();

      Cinrad2Record first = null;
      for (int j = 0; j < nradials; j++) {
        Cinrad2Record r = (Cinrad2Record) scanGroup.get(j);
        if (first == null) first = r;

        timeDataIter.setIntNext(r.data_msecs);
        elevDataIter.setFloatNext(r.getElevation());
        aziDataIter.setFloatNext(r.getAzimuth());

        if (r.data_msecs < last_msecs)
          logger.warn("makeCoordinateData time out of order " + r.data_msecs);
        last_msecs = r.data_msecs;
      }

      for (int j = nradials; j < maxRadials; j++) {
        timeDataIter.setIntNext(MISSING_INT);
        elevDataIter.setFloatNext(MISSING_FLOAT);
        aziDataIter.setFloatNext(MISSING_FLOAT);
      }

      nradialsIter.setIntNext(nradials);
      ngatesIter.setIntNext(first.getGateCount(datatype));
    }

    time.setCachedData(timeData, false);
    elev.setCachedData(elevData, false);
    azi.setCachedData(aziData, false);
    nradialsVar.setCachedData(nradialsData, false);
    ngatesVar.setCachedData(ngatesData, false);
  }