/** * A wrapper around a {@link HttpServletResponse} which attempts to detect the type of output * acquired from the servlet chain and apply a stylesheet to it if all conditions mentioned in * {@link XSLTFilter} are met. */ final class XSLTFilterServletResponse extends HttpServletResponseWrapper { private static final Logger log = org.slf4j.LoggerFactory.getLogger(XSLTFilterServletResponse.class); /** * If true, the stream will be passed verbatim to the next filter. This usually happens when the * output has a mime type different than <code>text/xml</code>. */ private boolean passthrough; /** The actual {@link HttpServletResponse}. */ private HttpServletResponse origResponse = null; /** The actual {@link HttpServletRequest}. */ private HttpServletRequest origRequest; /** * The {@link ServletOutputStream} returned from {@link #getOutputStream()} or <code>null</code>. */ private ServletOutputStream stream = null; /** The {@link PrintWriter} returned frmo {@link #getWriter()} or <code>null</code>. */ private PrintWriter writer = null; /** A pool of stylesheets used for XSLT processing. */ private TemplatesPool transformers; /** Servlet context for resolving local paths. */ private ServletContext context; /** * Creates an XSLT filter servlet response for a single request, wrapping a given {@link * HttpServletResponse}. * * @param response The original chain's {@link HttpServletResponse}. * @param request The original chain's {@link HttpServletRequest}. * @param transformers A pool of transformers to be used with this request. */ public XSLTFilterServletResponse( HttpServletResponse response, HttpServletRequest request, ServletContext context, TemplatesPool transformers) { super(response); this.origResponse = response; this.transformers = transformers; this.origRequest = request; this.context = context; } /** We override this method to detect XML data streams. */ public void setContentType(String contentType) { // Check if XSLT processing has been suppressed for this request. final boolean processingSuppressed = processingSuppressed(origRequest); if (processingSuppressed) { // Processing is suppressed. log.debug("XSLT processing disabled for the request."); } if (!processingSuppressed && (contentType.startsWith("text/xml") || contentType.startsWith("application/xml"))) { /* * We have an XML data stream. Set the real response to proper content type. * TODO: Should we make the encoding a configurable setting? */ origResponse.setContentType("text/html; charset=UTF-8"); } else { /* * The input is something we won't process anyway, so simply passthrough all * data directly to the output stream. */ if (!processingSuppressed) { log.info( "Content type is not text/xml or application/xml (" + contentType + "), passthrough."); } origResponse.setContentType(contentType); passthrough = true; // If the output stream is already initialized, passthrough everything. if (stream != null && stream instanceof DeferredOutputStream) { try { ((DeferredOutputStream) stream).passthrough(origResponse.getOutputStream()); } catch (IOException e) { ((DeferredOutputStream) stream).setExceptionOnNext(e); } } } } /** * Return <code>true</code> if the original request contained XSLT suppressing key. 
* * @see XSLTFilterConstants#NO_XSLT_PROCESSING */ private boolean processingSuppressed(HttpServletRequest origRequest2) { return (origRequest.getAttribute(XSLTFilterConstants.NO_XSLT_PROCESSING) != null) | (origRequest.getParameter(XSLTFilterConstants.NO_XSLT_PROCESSING) != null); } /** We do not delegate content length because it will most likely change. */ public void setContentLength(final int length) { log.debug("Original content length (ignored): " + length); } /** Flush the internal buffers. This only works if XSLT transformation is suppressed. */ public void flushBuffer() throws IOException { this.stream.flush(); } /** * Return the byte output stream for this response. This is either the original stream or a * buffered stream. * * @exception IllegalStateException Thrown when character stream has been already initialized * ({@link #getWriter()}). */ public ServletOutputStream getOutputStream() throws IOException { if (writer != null) { throw new IllegalStateException( "Character stream has been already initialized. Use streams consequently."); } if (stream != null) { return stream; } if (passthrough) { stream = origResponse.getOutputStream(); } else { stream = new DeferredOutputStream(); } return stream; } /** * Return the character output stream for this response. This is either the original stream or a * buffered stream. * * @exception IllegalStateException Thrown when byte stream has been already initialized ({@link * #getOutputStream()}). */ public PrintWriter getWriter() throws IOException { if (stream != null) { throw new IllegalStateException( "Byte stream has been already initialized. Use streams consequently."); } if (writer != null) { return writer; } if (passthrough) { writer = this.origResponse.getWriter(); return writer; } /* * TODO: The character encoding should be extracted in {@link #setContentType()}, * saved somewhere locally and used here. The response's character encoding may be * different (depends on the stylesheet). */ final String charEnc = origResponse.getCharacterEncoding(); this.stream = new DeferredOutputStream(); if (charEnc != null) { writer = new PrintWriter(new OutputStreamWriter(stream, charEnc)); } else { writer = new PrintWriter(stream); } return writer; } /** * This method must be invoked at the end of processing. The streams are closed and their content * is analyzed. Actual XSLT processing takes place here. */ @SuppressWarnings("unchecked") void finishResponse() throws IOException { if (writer != null) { writer.close(); } else { if (stream != null) stream.close(); } /* * If we're not in passthrough mode, then we need to finalize XSLT transformation. */ if (false == passthrough) { if (stream != null) { final byte[] bytes = ((DeferredOutputStream) stream).getBytes(); final boolean processingSuppressed = (origRequest.getAttribute(XSLTFilterConstants.NO_XSLT_PROCESSING) != null) | (origRequest.getParameter(XSLTFilterConstants.NO_XSLT_PROCESSING) != null); if (processingSuppressed) { // Just copy the buffered data to the output directly. final OutputStream os = origResponse.getOutputStream(); os.write(bytes); os.close(); } else { // Otherwise apply XSLT transformation to it. try { processWithXslt( bytes, (Map<String, Object>) origRequest.getAttribute(XSLTFilterConstants.XSLT_PARAMS_MAP), origResponse); } catch (TransformerException e) { final Throwable t = unwrapCause(e); if (t instanceof IOException) { throw (IOException) t; } filterError("Error applying stylesheet.", e); } } } } } /** Unwraps original throwable from the transformer/ SAX stack. 
*/ private Throwable unwrapCause(TransformerException e) { Throwable t; if (e.getException() != null) { t = e.getException(); } else if (e.getCause() != null) { t = e.getCause(); } else { return e; } do { if (t instanceof IOException) { // break early on IOException return t; } else if (t.getCause() != null) { t = t.getCause(); } else if (t instanceof SAXException && ((SAXException) t).getException() != null) { t = ((SAXException) t).getException(); } else { return t; } } while (true); } /** * Process the byte array (input XML) with the XSLT stylesheet and push the result to the output * stream. */ private void processWithXslt( byte[] bytes, final Map<String, Object> stylesheetParams, final HttpServletResponse response) throws TransformerConfigurationException, TransformerException, IOException { final TransformingDocumentHandler docHandler; try { final XMLReader reader = XMLReaderFactory.createXMLReader(); docHandler = new TransformingDocumentHandler(origRequest, context, stylesheetParams, transformers); docHandler.setContentTypeListener( new IContentTypeListener() { public void setContentType(String contentType, String encoding) { if (encoding == null) { response.setContentType(contentType); } else { response.setContentType(contentType + "; charset=" + encoding); } try { docHandler.setTransformationResult(new StreamResult(response.getOutputStream())); } catch (IOException e) { throw new RuntimeException("Could not open output stream."); } } }); reader.setContentHandler(docHandler); try { reader.parse(new InputSource(new ByteArrayInputStream(bytes))); } finally { docHandler.cleanup(); } } catch (SAXException e) { final Exception nested = e.getException(); if (nested != null) { if (nested instanceof IOException) { throw (IOException) nested; } else if (nested instanceof TransformerException) { throw (TransformerException) nested; } } throw new TransformerException("Input parsing exception.", e); } } /** * Attempts to send an internal server error HTTP error, if possible. Otherwise simply pushes the * exception message to the output stream. * * @param message Message to be printed to the logger and to the output stream. * @param t Exception that caused the error. */ protected void filterError(String message, Throwable t) { log.error("XSLT filter error: " + message, t); if (false == origResponse.isCommitted()) { // Reset the buffer and previous status code. origResponse.reset(); origResponse.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); origResponse.setContentType("text/html; charset=UTF-8"); } // Response committed. Just push the error to the output stream. try { final OutputStream os = origResponse.getOutputStream(); final PrintWriter osw = new PrintWriter(new OutputStreamWriter(os, "iso8859-1")); osw.write("<html><body><!-- " + XSLTFilterConstants.ERROR_TOKEN + " -->"); osw.write("<h1 style=\"color: red; margin-top: 1em;\">"); osw.write("Internal server exception"); osw.write("</h1>"); osw.write("<b>URI</b>: " + origRequest.getRequestURI() + "\n<br/><br/>"); serializeException(osw, t); if (t instanceof ServletException && ((ServletException) t).getRootCause() != null) { osw.write("<br/><br/><h2>ServletException root cause:</h2>"); serializeException(osw, ((ServletException) t).getRootCause()); } osw.write("</body></html>"); osw.flush(); } catch (IOException e) { // Not much to do in such case (connection broken most likely). log.debug("Filter error could not be returned to client."); } } /** Utility method to serialize an exception and its stack trace to simple HTML. 
*/ private final void serializeException(PrintWriter osw, Throwable t) { osw.write("<b>Exception</b>: " + t.toString() + "\n<br/><br/>"); osw.write("<b>Stack trace:</b>"); osw.write( "<pre style=\"margin: 1px solid red; padding: 3px; font-family: sans-serif; font-size: small;\">"); t.printStackTrace(osw); osw.write("</pre>"); } /** */ private void detectErrorResponse(int errorCode) { if (errorCode != HttpServletResponse.SC_ACCEPTED) { origRequest.setAttribute(XSLTFilterConstants.NO_XSLT_PROCESSING, Boolean.TRUE); } } /** */ public void sendError(int errorCode) throws IOException { detectErrorResponse(errorCode); super.sendError(errorCode); } /** */ public void sendError(int errorCode, String message) throws IOException { detectErrorResponse(errorCode); super.sendError(errorCode, message); } /** */ public void setStatus(int statusCode) { detectErrorResponse(statusCode); super.setStatus(statusCode); } /** */ public void setStatus(int statusCode, String message) { detectErrorResponse(statusCode); super.setStatus(statusCode, message); } }
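/*
 * Usage sketch (not part of the original sources): a minimal outline of how a servlet
 * filter could drive XSLTFilterServletResponse. The real XSLTFilter referenced in the
 * javadoc above is not shown in this section, so the class name below, the way the
 * TemplatesPool is obtained, and the placement of this sketch in the wrapper's package
 * (needed to call the package-private finishResponse()) are assumptions for illustration.
 */
final class XsltFilterUsageSketch implements javax.servlet.Filter {

  /** Pool of compiled stylesheets; how it is built depends on the real XSLTFilter (assumed). */
  private TemplatesPool transformers;

  private javax.servlet.ServletContext servletContext;

  public void init(javax.servlet.FilterConfig filterConfig) throws javax.servlet.ServletException {
    this.servletContext = filterConfig.getServletContext();
    // this.transformers = ...; // constructed from the filter configuration (assumed)
  }

  public void doFilter(
      javax.servlet.ServletRequest request,
      javax.servlet.ServletResponse response,
      javax.servlet.FilterChain chain)
      throws java.io.IOException, javax.servlet.ServletException {

    final javax.servlet.http.HttpServletRequest httpRequest =
        (javax.servlet.http.HttpServletRequest) request;
    final javax.servlet.http.HttpServletResponse httpResponse =
        (javax.servlet.http.HttpServletResponse) response;

    // Wrap the response so that text/xml or application/xml output is buffered
    // instead of being written directly to the client.
    final XSLTFilterServletResponse wrapped =
        new XSLTFilterServletResponse(httpResponse, httpRequest, servletContext, transformers);

    chain.doFilter(request, wrapped);

    // Must run after the chain returns: closes the buffered streams and either
    // applies the stylesheet or copies the buffered bytes through verbatim.
    wrapped.finishResponse();
  }

  public void destroy() {}
}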
/** * Created by 4535992 on 21/04/2015. * * @version 2015-06-25 */ @SuppressWarnings("unused") public class BeansKit implements org.springframework.context.ResourceLoaderAware, BeanPostProcessor { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BeansKit.class); private static final BeansKit instance = new BeansKit(); private BeansKit() {} public static BeansKit getInstance() { return instance; } private ResourceLoader resourceLoader; public static <T> T getBeanFromContext( String nameOfBean, Class<T> requiredType, AbstractApplicationContext context) { T obj = context.getBean(nameOfBean, requiredType); context.registerShutdownHook(); return obj; } public static <T> T getBeanFromContext( String nameOfBean, Class<T> requiredType, ApplicationContext context) { // retrieve configured instance return context.getBean(nameOfBean, requiredType); } public static ApplicationContext tryGetContextSpring(String filePathXml, Class<?> thisClass) throws IOException { return loadApplicationContextSpring(thisClass, filePathXml); } private static ApplicationContext loadApplicationContextSpring( Class<?> thisClass, String... filePaths) { ApplicationContext context = new GenericApplicationContext(); // Clean the path for load the context... for (int i = 0; i < filePaths.length; i++) { if (filePaths[i].startsWith(File.separator) || filePaths[i].startsWith("/")) { filePaths[i] = filePaths[i].substring(1, filePaths[i].length()); } } // This container loads the definitions of the beans from an XML file. // Here you do not need to provide the full path of the XML file but // you need to set CLASSPATH properly because this container will look // bean configuration XML file in CLASSPATH. // You can force with the fileSystem using "file:" instead of "classpath:". try { context = new ClassPathXmlApplicationContext(filePaths, true); } catch (Exception e0) { try { if (e0.getCause().getMessage().contains("has already been set")) { logger.warn(e0.getMessage() + "->" + e0.getCause()); } } catch (java.lang.NullPointerException e) { /*do nothing*/ } try { context = new ClassPathXmlApplicationContext(filePaths, true); } catch (Exception e1) { if (thisClass != null) { try { context = new ClassPathXmlApplicationContext(filePaths, thisClass); } catch (Exception e2) { try { // This container loads the definitions of the beans from an XML file. // Here you need to provide the full path of the XML bean configuration file to the // constructor. // You can force with file: property to the class file. 
List<String> files = new ArrayList<>(); for (String spath : filePaths) { Path path = getResourceAsFile(spath, thisClass).toPath(); if (Files.exists(path) && path.toRealPath() != null) { files.add(path.toAbsolutePath().toString()); } else { logger.warn("The resource with path:" + path.toString() + " not exists"); } } if (!files.isEmpty()) { context = new FileSystemXmlApplicationContext( files.toArray(new String[files.size()]), true); } else { logger.warn("The paths used are reference 0 resources return NULL value"); return null; } } catch (Exception e3) { logger.error(e3.getMessage(), e3); } } } else { logger.error(e1.getMessage(), e1); } } } return context; } public static ApplicationContext tryGetContextSpring(String[] filesPathsXml, Class<?> thisClass) throws IOException { String[] paths = new String[filesPathsXml.length]; int i = 0; for (String path : filesPathsXml) { if (new File(path).exists()) { // String path = toStringUriWithPrefix(getResourceAsFile(spath, thisClass)); paths[i] = path; i++; } } return loadApplicationContextSpring(thisClass, paths); } /*public static String getResourceAsString(String fileName,Class<?> thisClass) { String result; try { result = org.apache.commons.io.IOUtils.toString(thisClass.getClassLoader().getResourceAsStream(fileName)); return result; } catch (IOException e) { logger.error(e.getMessage(),e); return null; } }*/ private ClassLoader getThisClassLoader() { return this.getClass().getClassLoader(); } public static File getResourceAsFile(String name, Class<?> thisClass) { ClassLoader classLoader = thisClass.getClassLoader(); try { //noinspection ConstantConditions // String path = classLoader.getResource("").getPath(); // ///C:/Users/tenti/Desktop/Projects/gate-basic/target/test-classes/ return new File(classLoader.getResource(name).getFile()); } catch (NullPointerException e) { try { // return new // File(Thread.currentThread().getContextClassLoader().getResource(name).getFile()); // ClassLoader classLoader = Config.class.getClassLoader(); // URL resource = classLoader.getResource(name); // String path = Paths.get(resource.toURI()).toAbsolutePath().toString(); File file = null; URL url = null; if (classLoader != null) { url = classLoader.getResource(name); } if (url == null) { url = ClassLoader.getSystemResource(name); } if (url != null) { try { file = new File(url.toURI()); } catch (URISyntaxException e5) { file = new File(url.getPath()); } } if (file == null) { file = getResourceAsFileFromBuildFolder(name); } // return new File(path); return file; } catch (NullPointerException | IOException e2) { logger.error(e2.getMessage(), e2); return new File(""); } } } private static File getResourceAsFileFromBuildFolder(String name) throws IOException { String path = System.getProperty("user.dir") + File.separator + "build" + File.separator + "resources" + File.separator + "main" + File.separator + name.replace("/", File.separator); return new File(path); } /* * @param the {@link String} path to the resource * @href https://discuss.gradle.org/t/getresourceasstream-returns-null-in-plugin-in-daemon-mode/2385/7 * @return the {@link File} resource. 
*/ /*public static File getResourceURLAsFile(String resource) throws IOException{ new URLConnection(new URL("file:///")) { { setDefaultUseCaches(false); } @Override public void connect() throws IOException { } } return new File() }*/ public static File getResourceSpringAsFile(String pathRelativeToFileOnResourceFolder) { try { //noinspection ConstantConditions return getResourceSpringAsResource(pathRelativeToFileOnResourceFolder, null, null).getFile(); } catch (IOException e) { logger.error(e.getMessage(), e); return null; } } public static File getResourceSpringAsFile( String pathRelativeToFileOnResourceFolder, Class<?> clazz) { try { //noinspection ConstantConditions return getResourceSpringAsResource(pathRelativeToFileOnResourceFolder, clazz, null).getFile(); } catch (IOException e) { logger.error(e.getMessage(), e); return null; } } public static File getResourceSpringAsFile( String pathRelativeToFileOnResourceFolder, ClassLoader classLoader) { try { //noinspection ConstantConditions return getResourceSpringAsResource(pathRelativeToFileOnResourceFolder, null, classLoader) .getFile(); } catch (IOException e) { logger.error(e.getMessage(), e); return null; } } public static String getResourceSpringAsString(String pathRelativeToFileOnResourceFolder) { //noinspection ConstantConditions return readResource( getResourceSpringAsResource(pathRelativeToFileOnResourceFolder, null, null)); } /** * Method to get a resource. href: * http://howtodoinjava.com/spring/spring-core/how-to-load-external-resources-files-into-spring-context/ * * @param fileNameOrUri the {@link Object} to convert to {@link Resource} must be a {@link * File},{@link URI},{@link URL},{@link Path},{@link String},{@link InputStream} * @param clazz the {@link Class} for reference to the resource folder. * @param classLoader the {@link ClassLoader} for load the resources. * @return the {@link Resource}. 
*/ private static Resource getResourceSpringAsResource( Object fileNameOrUri, @Nullable Class<?> clazz, @Nullable ClassLoader classLoader) { try { Resource yourfile; // if File if (fileNameOrUri instanceof File && ((File) fileNameOrUri).exists()) { yourfile = new org.springframework.core.io.FileSystemResource(((File) fileNameOrUri)); } // if URL else if (org.springframework.util.ResourceUtils.isUrl(String.valueOf(fileNameOrUri)) || fileNameOrUri instanceof URL) { if (fileNameOrUri instanceof URL) { yourfile = new org.springframework.core.io.UrlResource((URL) fileNameOrUri); } else { yourfile = new org.springframework.core.io.UrlResource(String.valueOf(fileNameOrUri)); } // if Path or URI } else if (fileNameOrUri instanceof Path || fileNameOrUri instanceof URI) { if (fileNameOrUri instanceof Path && Files.exists((Path) fileNameOrUri)) { yourfile = new org.springframework.core.io.PathResource((Path) fileNameOrUri); } else { yourfile = new org.springframework.core.io.PathResource((URI) fileNameOrUri); } /* }else if(fileNameOrUri instanceof Class){ org.springframework.core.io.ClassRelativeResourceLoader relativeResourceLoader = new org.springframework.core.io.ClassRelativeResourceLoader((Class<?>) fileNameOrUri); yourfile = relativeResourceLoader.getResource("") */ // if InputStream } else if (fileNameOrUri instanceof InputStream) { yourfile = new org.springframework.core.io.InputStreamResource((InputStream) fileNameOrUri); } else if (fileNameOrUri instanceof byte[]) { yourfile = new org.springframework.core.io.ByteArrayResource((byte[]) fileNameOrUri); // if String path toa file or String of a URI } else if (fileNameOrUri instanceof String) { if (classLoader != null) { yourfile = new org.springframework.core.io.ClassPathResource( String.valueOf(fileNameOrUri), classLoader); } else if (clazz != null) { yourfile = new org.springframework.core.io.ClassPathResource( String.valueOf(fileNameOrUri), clazz); } else { yourfile = new org.springframework.core.io.ClassPathResource(String.valueOf(fileNameOrUri)); } } else { logger.error( "Can't load the resource for the Object with Class:" + fileNameOrUri.getClass().getName()); return null; } return yourfile; } catch (IOException e) { logger.error(e.getMessage(), e); return null; } } public static String readResource(Resource resource) { try { /* org.springframework.core.io.Resource resource = new org.springframework.core.io.ClassPathResource(fileLocationInClasspath);*/ BufferedReader br = new BufferedReader(new InputStreamReader(resource.getInputStream()), 1024); StringBuilder stringBuilder = new StringBuilder(); String line; while ((line = br.readLine()) != null) { stringBuilder.append(line).append('\n'); } br.close(); return stringBuilder.toString(); } catch (Exception e) { logger.error(e.getMessage(), e); return null; } } public String readResource(String absolutePathToFile, ResourceLoader resourceLoader) throws IOException { // This line will be changed for all versions of other examples : // "file:c:/temp/filesystemdata.txt" Resource banner = resourceLoader.getResource("file:" + absolutePathToFile); InputStream in = banner.getInputStream(); BufferedReader reader = new BufferedReader(new InputStreamReader(in)); StringBuilder sb = new StringBuilder(); while (true) { String line = reader.readLine(); if (line == null) break; sb.append(line).append(System.getProperty("line.separator")); } reader.close(); return sb.toString(); } @Override public void setResourceLoader(ResourceLoader resourceLoader) { this.resourceLoader = resourceLoader; } /** * Method to 
get a resource from relative path with spring. * * @param location the {@link String} location of the file on the resource folders e.g. * "location.txt". * @return the {@link Resource} of Spring core io. */ public Resource getResource(String location) { return resourceLoader.getResource(location); } /* * Convert filename string to a URI. * Map '\' characters to '/' (this might break if '\' is used in * a Unix filename, but this is assumed to be a very rare occurrence * as '\' is often used with special meaning on Unix.) * For unix-like systems, the absolute filename begins with a '/' and is preceded by "file://". * For other systems an extra '/' must be supplied. * * @param filePath string of the path to the file * @return path to the in uri formato with prefix file:/// */ /*private static String toStringUriWithPrefix(String filePath) { StringBuilder mapFileName = new StringBuilder(filePath); for (int i = 0; i < mapFileName.length(); i++) { if (mapFileName.charAt(i) == '\\') mapFileName.setCharAt(i, '/'); } if (filePath.charAt(0) == '/') return "file://" + mapFileName.toString(); else return "file:///" + mapFileName.toString(); }*/ /* * Method to convert a File to a URI with the prefix file://. * * @param file the File to convert. * @return the String URI with prefix. */ /*public static String toStringUriWithPrefix(File file) { return toStringUriWithPrefix(file.getAbsolutePath()); }*/ // ----------------------------------------------------------------------------------------- /*public static Collection<?> collect(Collection<?> collection, String propertyName) { return org.apache.commons.collections.CollectionUtils.collect(collection, new org.apache.commons.beanutils.BeanToPropertyValueTransformer(propertyName)); }*/ /*public static org.springframework.context.ApplicationContext createApplicationContext(String uri) throws MalformedURLException { org.springframework.core.io.Resource resource = getResourceSpringAsResource(uri,null,null); logger.debug("Using " + resource + " from " + uri); try { return new ResourceAdapterApplicationContext()ResourceXmlApplicationContext(resource) { @Override protected void initBeanDefinitionReader(org.springframework.beans.context.xml.XmlBeanDefinitionReader reader) { reader.setValidating(true); } }; } }*/ @Override public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { System.out.println("BeforeInitialization : " + beanName); return bean; // you can return any other object as well } @Override public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { System.out.println("AfterInitialization : " + beanName); return bean; // you can return any other object as well } }
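/*
 * Usage sketch (not part of the original sources): a minimal example of the typical
 * call sequence for BeansKit. The file name "beans.xml" and the bean name "myService"
 * are placeholders for illustration, not names defined by this class.
 */
class BeansKitUsageSketch {

  public static void main(String[] args) throws java.io.IOException {
    // Load an ApplicationContext; loadApplicationContextSpring() falls back from
    // classpath loading to file-system loading when the first attempts fail.
    org.springframework.context.ApplicationContext context =
        BeansKit.tryGetContextSpring("beans.xml", BeansKitUsageSketch.class);

    // Retrieve a configured bean by name and required type (placeholder bean name).
    Object myService = BeansKit.getBeanFromContext("myService", Object.class, context);
    System.out.println("Loaded bean: " + myService);

    // Read a classpath resource into a String through Spring's Resource abstraction.
    String xml = BeansKit.getResourceSpringAsString("beans.xml");
    System.out.println(xml);
  }
}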
/**
 * Helper class for using the netcdf-3 record dimension.
 *
 * @author caron
 * @since Feb 29, 2008
 */
public class RecordDatasetHelper {
  private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(RecordDatasetHelper.class);

  protected NetcdfDataset ncfile;
  protected String obsTimeVName, nomTimeVName;
  protected String latVName, lonVName, zcoordVName, zcoordUnits;
  protected String stnIdVName, stnIndexVName, stnDescVName;
  protected StationHelper stationHelper;
  protected DataType stationIdType;

  protected StructureDS recordVar;
  protected Dimension obsDim;

  protected LatLonRect boundingBox;
  protected double minDate, maxDate;
  protected DateUnit timeUnit;

  protected double altScaleFactor = 1.0;

  protected Formatter errs = null;
  protected boolean showErrors = true;

  /**
   * Constructor.
   *
   * @param ncfile the netcdf file
   * @param typedDataVariables list of data variables; all record variables will be added to this
   *     list, except the record structure itself and scalar variables. You can remove extra
   *     variables afterwards.
   * @param obsTimeVName observation time variable name (required)
   * @param nomTimeVName nominal time variable name (may be null)
   * @throws IllegalArgumentException if ncfile has no unlimited dimension and recDimName is null.
   */
  public RecordDatasetHelper(
      NetcdfDataset ncfile,
      String obsTimeVName,
      String nomTimeVName,
      List<VariableSimpleIF> typedDataVariables,
      String recDimName,
      Formatter errBuffer) {
    this.ncfile = ncfile;
    this.obsTimeVName = obsTimeVName;
    this.nomTimeVName = nomTimeVName;
    this.errs = errBuffer;

    // check if we already have a structure vs if we have to add it.
    if (this.ncfile.hasUnlimitedDimension()) {
      this.ncfile.sendIospMessage(NetcdfFile.IOSP_MESSAGE_ADD_RECORD_STRUCTURE);
      this.recordVar = (StructureDS) this.ncfile.getRootGroup().findVariable("record");
      this.obsDim = ncfile.getUnlimitedDimension();

    } else {
      if (recDimName == null)
        throw new IllegalArgumentException(
            "File <"
                + this.ncfile.getLocation()
                + "> has no unlimited dimension, specify pseudo record dimension with observationDimension global attribute.");
      this.obsDim = this.ncfile.getRootGroup().findDimension(recDimName);
      this.recordVar = new StructurePseudoDS(this.ncfile, null, "record", null, obsDim);
    }

    // create member variables
    List<Variable> recordMembers = ncfile.getVariables();
    for (Variable v : recordMembers) {
      if (v == recordVar) continue;
      if (v.isScalar()) continue;
      if (v.getDimension(0) == this.obsDim) typedDataVariables.add(v);
    }

    // need the time units
    Variable timeVar = ncfile.findVariable(obsTimeVName);
    String timeUnitString =
        ncfile.findAttValueIgnoreCase(timeVar, CDM.UNITS, "seconds since 1970-01-01");

    try {
      timeUnit = new DateUnit(timeUnitString);
    } catch (Exception e) {
      if (null != errs) errs.format("Error on string = %s == %s%n", timeUnitString, e.getMessage());
      try {
        timeUnit = new DateUnit("seconds since 1970-01-01");
      } catch (Exception e1) {
        // can't happen
      }
    }
  }

  /**
   * Set extra information used by station obs datasets. Use stnIdVName or stnIndexVName.
   *
   * @param stnIdVName the obs variable that is used to find the station in the stnHash; may be type
   *     int or a String (char).
* @param stnDescVName optional station var containing station description */ public void setStationInfo( String stnIdVName, String stnDescVName, String stnIndexVName, StationHelper stationHelper) { this.stnIdVName = stnIdVName; this.stnDescVName = stnDescVName; this.stnIndexVName = stnIndexVName; this.stationHelper = stationHelper; if (stnIdVName != null) { Variable stationVar = ncfile.findVariable(stnIdVName); stationIdType = stationVar.getDataType(); } } public void setLocationInfo(String latVName, String lonVName, String zcoordVName) { this.latVName = latVName; this.lonVName = lonVName; this.zcoordVName = zcoordVName; // check for meter conversion if (zcoordVName != null) { Variable v = ncfile.findVariable(zcoordVName); zcoordUnits = ncfile.findAttValueIgnoreCase(v, CDM.UNITS, null); if (zcoordUnits != null) try { altScaleFactor = getMetersConversionFactor(zcoordUnits); } catch (Exception e) { if (errs != null) errs.format("%s", e.getMessage()); } } } // make structure variable names to shortNames so StructureData sdata can // access it members public void setShortNames( String latVName, String lonVName, String altVName, String obsTimeVName, String nomTimeVName) { this.latVName = latVName; this.lonVName = lonVName; this.zcoordVName = altVName; this.obsTimeVName = obsTimeVName; this.nomTimeVName = nomTimeVName; } protected static double getMetersConversionFactor(String unitsString) throws Exception { SimpleUnit unit = SimpleUnit.factoryWithExceptions(unitsString); return unit.convertTo(1.0, SimpleUnit.meterUnit); } public Structure getRecordVar() { return (this.recordVar); } public int getRecordCount() { Dimension unlimitedDim = ncfile.getUnlimitedDimension(); return unlimitedDim.getLength(); } public void setTimeUnit(DateUnit timeUnit) { this.timeUnit = timeUnit; } public DateUnit getTimeUnit() { return this.timeUnit; } public LatLonPoint getLocation(StructureData sdata) { StructureMembers members = sdata.getStructureMembers(); double lat = sdata.convertScalarDouble(members.findMember(latVName)); double lon = sdata.convertScalarDouble(members.findMember(lonVName)); return new LatLonPointImpl(lat, lon); } public double getLatitude(StructureData sdata) { StructureMembers members = sdata.getStructureMembers(); return sdata.convertScalarDouble(members.findMember(latVName)); } public double getLongitude(StructureData sdata) { StructureMembers members = sdata.getStructureMembers(); return sdata.convertScalarDouble(members.findMember(lonVName)); } public double getZcoordinate(StructureData sdata) { StructureMembers members = sdata.getStructureMembers(); return (zcoordVName == null) ? 
Double.NaN : sdata.convertScalarDouble(members.findMember(zcoordVName)); } public String getZcoordUnits() { return zcoordUnits; } public Date getObservationTimeAsDate(StructureData sdata) { return timeUnit.makeDate(getObservationTime(sdata)); } public double getObservationTime(StructureData sdata) { return getTime(sdata.findMember(obsTimeVName), sdata); } private double getTime(StructureMembers.Member timeVar, StructureData sdata) { if (timeVar == null) return 0.0; if ((timeVar.getDataType() == DataType.CHAR) || (timeVar.getDataType() == DataType.STRING)) { String time = sdata.getScalarString(timeVar); CalendarDate date = CalendarDateFormatter.isoStringToCalendarDate(null, time); if (date == null) { log.error("Cant parse date - not ISO formatted, = " + time); return 0.0; } return date.getMillis() / 1000.0; } else { return sdata.convertScalarDouble(timeVar); } } /* * This reads through all the records in the dataset, and constructs a list of * RecordPointObs or RecordStationObs. It does not cache the data. * <p>If stnIdVName is not null, its a StationDataset, then construct a Station HashMap of StationImpl * objects. Add the RecordStationObs into the list of obs for that station. * * @param cancel allow user to cancel * @return List of RecordPointObs or RecordStationObs * @throws IOException on read error * public List<RecordPointObs> readAllCreateObs(CancelTask cancel) throws IOException { // see if its a station or point dataset boolean hasStations = stnIdVName != null; if (hasStations) stnHash = new HashMap<Object, Station>(); // get min and max date and lat,lon double minDate = Double.MAX_VALUE; double maxDate = -Double.MAX_VALUE; double minLat = Double.MAX_VALUE; double maxLat = -Double.MAX_VALUE; double minLon = Double.MAX_VALUE; double maxLon = -Double.MAX_VALUE; // read all the data, create a RecordObs StructureMembers members = null; List<RecordPointObs> records = new ArrayList<RecordPointObs>(); int recno = 0; Structure.Iterator ii = recordVar.getStructureIterator(); while (ii.hasNext()) { StructureData sdata = ii.next(); if (members == null) members = sdata.getStructureMembers(); Object stationId = null; if (hasStations) { if (stationIdType == DataType.INT) { stationId = sdata.getScalarInt(stnIdVName); } else stationId = sdata.getScalarString(stnIdVName).trim(); } String desc = (stnDescVName == null) ? null : sdata.getScalarString(stnDescVName); double lat = sdata.getScalarDouble(latVName); double lon = sdata.getScalarDouble(lonVName); double alt = (altVName == null) ? 0.0 : altScaleFactor * sdata.getScalarDouble(altVName); double obsTime = sdata.convertScalarDouble(members.findMember(obsTimeVName)); double nomTime = (nomTimeVName == null) ? obsTime : sdata.convertScalarDouble(members.findMember(nomTimeVName)); //double obsTime = sdata.convertScalarDouble( members.findMember( obsTimeVName) ); //double nomTime = (nomTimeVName == null) ? 
obsTime : sdata.convertScalarDouble( members.findMember( nomTimeVName)); if (hasStations) { Station stn = stnHash.get(stationId); if (stn == null) { stn = new Station(stationId.toString(), desc, lat, lon, alt); stnHash.put(stationId, stn); } RecordStationObs stnObs = new RecordStationObs(stn, obsTime, nomTime, timeUnit, recno); records.add(stnObs); //stn.addObs( stnObs); } else { records.add(new RecordPointObs(new EarthLocation(lat, lon, alt), obsTime, nomTime, timeUnit, recno)); } // track date range and bounding box minDate = Math.min(minDate, obsTime); maxDate = Math.max(maxDate, obsTime); minLat = Math.min(minLat, lat); maxLat = Math.max(maxLat, lat); minLon = Math.min(minLon, lon); maxLon = Math.max(maxLon, lon); recno++; if ((cancel != null) && cancel.isCancel()) return null; } boundingBox = new LatLonRect(new LatLonPointImpl(minLat, minLon), new LatLonPointImpl(maxLat, maxLon)); return records; } /* private boolean debugBB = false; public List getData(ArrayList records, LatLonRect boundingBox, CancelTask cancel) throws IOException { if (debugBB) System.out.println("Want bb= "+boundingBox); ArrayList result = new ArrayList(); for (int i = 0; i < records.size(); i++) { RecordDatasetHelper.RecordPointObs r = (RecordDatasetHelper.RecordPointObs) records.get(i); if (boundingBox.contains(r.getLatLon())) { if (debugBB) System.out.println(" ok latlon= "+r.getLatLon()); result.add( r); } if ((cancel != null) && cancel.isCancel()) return null; } return result; } // return List<PointObsDatatype> public List getData(ArrayList records, LatLonRect boundingBox, double startTime, double endTime, CancelTask cancel) throws IOException { if (debugBB) System.out.println("Want bb= "+boundingBox); ArrayList result = new ArrayList(); for (int i = 0; i < records.size(); i++) { RecordDatasetHelper.RecordPointObs r = (RecordDatasetHelper.RecordPointObs) records.get(i); if (boundingBox.contains(r.getLatLon())) { if (debugBB) System.out.println(" ok latlon= "+r.getLatLon()); double timeValue = r.getObservationTime(); if ((timeValue >= startTime) && (timeValue <= endTime)) result.add( r); } if ((cancel != null) && cancel.isCancel()) return null; } return result; } */ ////////////////////////////////////////////////////////////////////////////////////// public PointFeature factory(StationImpl s, StructureData sdata, int recno) { if (s == null) return new RecordPointObs(sdata, recno); else return new RecordStationObs(s, sdata, recno); } class RecordPointObs extends PointFeatureImpl { protected int recno; protected StructureData sdata; RecordPointObs(int recno) { super(RecordDatasetHelper.this.timeUnit); this.recno = recno; } // Constructor for the case where you keep track of the location, time of each record, but not // the data. protected RecordPointObs( EarthLocation location, double obsTime, double nomTime, DateUnit timeUnit, int recno) { super(location, obsTime, nomTime, timeUnit); this.recno = recno; } // Constructor for when you already have the StructureData and want to wrap it in a // StationObsDatatype protected RecordPointObs(StructureData sdata, int recno) { super(RecordDatasetHelper.this.timeUnit); this.sdata = sdata; this.recno = recno; StructureMembers members = sdata.getStructureMembers(); obsTime = getTime(members.findMember(obsTimeVName), sdata); nomTime = (nomTimeVName == null) ? 
obsTime : getTime(members.findMember(nomTimeVName), sdata); // this assumes the lat/lon/alt is stored in the obs record double lat = sdata.convertScalarDouble(members.findMember(latVName)); double lon = sdata.convertScalarDouble(members.findMember(lonVName)); double alt = (zcoordVName == null) ? 0.0 : altScaleFactor * sdata.convertScalarDouble(members.findMember(zcoordVName)); location = new EarthLocationImpl(lat, lon, alt); } public String getId() { return Integer.toString(recno); } public LatLonPoint getLatLon() { return new LatLonPointImpl(location.getLatitude(), location.getLongitude()); } public StructureData getFeatureData() throws IOException { if (null == sdata) { try { // deal with files that are updating // LOOK kludge? if (recno > getRecordCount()) { int n = getRecordCount(); ncfile.syncExtend(); log.info( "RecordPointObs.getData recno=" + recno + " > " + n + "; after sync= " + getRecordCount()); } sdata = recordVar.readStructure(recno); } catch (ucar.ma2.InvalidRangeException e) { e.printStackTrace(); throw new IOException(e.getMessage()); } } return sdata; } public ucar.ma2.StructureData getDataAll() throws java.io.IOException { return getFeatureData(); } } ////////////////////////////////////////////////////////////////////////////////////// // a PointObs with the location info stored as a Station class RecordStationObs extends RecordPointObs { private Station station; /** * Constructor for the case where you keep track of the station, time of each record, but the * data reading is deferred. * * @param station data is for this Station * @param obsTime observation time * @param nomTime nominal time (may be NaN) * @param recno data is at this record number */ protected RecordStationObs( Station station, double obsTime, double nomTime, DateUnit timeUnit, int recno) { super(station, obsTime, nomTime, timeUnit, recno); this.station = station; } // Constructor for when you have everything protected RecordStationObs( Station station, double obsTime, double nomTime, StructureData sdata, int recno) { super(recno); this.station = station; this.location = station; this.obsTime = obsTime; this.nomTime = nomTime; this.sdata = sdata; } // Constructor for when you already have the StructureData and Station, and calculate times protected RecordStationObs(Station station, StructureData sdata, int recno) { super(recno); this.station = station; this.location = station; this.sdata = sdata; StructureMembers members = sdata.getStructureMembers(); obsTime = getTime(members.findMember(obsTimeVName), sdata); nomTime = (nomTimeVName == null) ? obsTime : getTime(members.findMember(nomTimeVName), sdata); } // Constructor for when you already have the StructureData, and need to find Station and times protected RecordStationObs(StructureData sdata, int recno, boolean useId) { super(recno); this.recno = recno; this.sdata = sdata; this.timeUnit = RecordDatasetHelper.this.timeUnit; StructureMembers members = sdata.getStructureMembers(); obsTime = getTime(members.findMember(obsTimeVName), sdata); nomTime = (nomTimeVName == null) ? 
          obsTime : getTime(members.findMember(nomTimeVName), sdata);

      if (useId) {
        // this assumes the station id/name is stored in the obs record
        String stationId;
        if (stationIdType == DataType.INT) {
          stationId = Integer.toString(sdata.getScalarInt(stnIdVName));
        } else
          stationId = sdata.getScalarString(stnIdVName).trim();
        station = stationHelper.getStation(stationId);
        if (station == null) {
          if (null != errs)
            errs.format(" can't find station id = <%s> when reading record %d%n", stationId, recno);
          log.error(" can't find station id = <" + stationId + "> when reading record " + recno);
        }

      } else {
        // use a station index
        List<Station> stations = stationHelper.getStations();
        int stationIndex = sdata.getScalarInt(stnIndexVName);
        if (stationIndex < 0 || stationIndex >= stations.size()) {
          if (null != errs)
            errs.format(" can't find station at index = %d when reading record %d%n", stationIndex, recno);
          log.error("can't find station at index = " + stationIndex + " when reading record " + recno);
        } else
          station = stations.get(stationIndex);
      }

      location = station;
    }
  }
}
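/*
 * Usage sketch (not part of the original sources): a minimal outline of how the helper
 * is typically driven. The file path and the variable names ("time", "lat", "lon", "alt")
 * are placeholders; real datasets supply their own names, and the Formatter collects any
 * unit or parsing errors. Imports assumed for this sketch:
 *   java.util.ArrayList, java.util.Formatter, java.util.List,
 *   ucar.ma2.StructureData, ucar.nc2.Structure, ucar.nc2.VariableSimpleIF,
 *   ucar.nc2.dataset.NetcdfDataset
 */
class RecordDatasetHelperUsageSketch {

  public static void main(String[] args) throws Exception {
    NetcdfDataset ncd = NetcdfDataset.openDataset("/data/obs/example.nc"); // placeholder path
    try {
      List<VariableSimpleIF> dataVars = new ArrayList<>();
      Formatter errs = new Formatter();

      // recDimName may be null when the file has an unlimited (record) dimension.
      RecordDatasetHelper helper =
          new RecordDatasetHelper(ncd, "time", null, dataVars, null, errs);
      helper.setLocationInfo("lat", "lon", "alt");

      // Read each record and use the helper to decode its time and location.
      Structure record = helper.getRecordVar();
      int n = helper.getRecordCount();
      for (int recno = 0; recno < n; recno++) {
        StructureData sdata = record.readStructure(recno);
        System.out.printf(
            "%s at %s%n", helper.getObservationTimeAsDate(sdata), helper.getLocation(sdata));
      }
    } finally {
      ncd.close();
    }
  }
}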
/** * THREDDS opendap server. * * @author jcaron * @author Nathan David Potter * @since Apr 27, 2009 (branched) */ public class OpendapServlet extends AbstractServlet { static final String DEFAULTCONTEXTPATH = "/thredds"; static final String GDATASET = "guarded_dataset"; private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(OpendapServlet.class); private boolean allowSessions = false; private boolean allowDeflate = false; // handled by Tomcat private String odapVersionString = "opendap/3.7"; private URI baseURI = null; private int ascLimit = 50; private int binLimit = 500; private boolean debugSession = false; public String getDefaultContextPath() { return DEFAULTCONTEXTPATH; } public void init() throws javax.servlet.ServletException { super.init(); org.slf4j.Logger logServerStartup = org.slf4j.LoggerFactory.getLogger("serverStartup"); logServerStartup.info( getClass().getName() + " initialization start - " + UsageLog.setupNonRequestContext()); this.ascLimit = ThreddsConfig.getInt("Opendap.ascLimit", ascLimit); this.binLimit = ThreddsConfig.getInt("Opendap.binLimit", binLimit); this.odapVersionString = ThreddsConfig.get("Opendap.serverVersion", odapVersionString); logServerStartup.info( getClass().getName() + " version= " + odapVersionString + " ascLimit = " + ascLimit + " binLimit = " + binLimit); // debugging actions makeDebugActions(); logServerStartup.info( getClass().getName() + " initialization done - " + UsageLog.closingMessageNonRequestContext()); } public String getServerVersion() { return this.odapVersionString; } // Servlets that support HTTP GET requests and can quickly determine their last modification time // should // override this method. This makes browser and proxy caches work more effectively, reducing the // load on // server and network resources. protected long getLastModified(HttpServletRequest req) { String query = req.getQueryString(); if (query != null) return -1; String path = req.getPathInfo(); if (path == null) return -1; if (path.endsWith(".asc")) path = path.substring(0, path.length() - 4); else if (path.endsWith(".ascii")) path = path.substring(0, path.length() - 6); else if (path.endsWith(".das")) path = path.substring(0, path.length() - 4); else if (path.endsWith(".dds")) path = path.substring(0, path.length() - 4); else if (path.endsWith(".ddx")) path = path.substring(0, path.length() - 4); else if (path.endsWith(".dods")) path = path.substring(0, path.length() - 5); else if (path.endsWith(".html")) path = path.substring(0, path.length() - 5); else if (path.endsWith(".info")) path = path.substring(0, path.length() - 5); else if (path.endsWith(".opendap")) path = path.substring(0, path.length() - 5); else return -1; // if (null != DatasetHandler.findResourceControl( path)) return -1; // LOOK weird Firefox // beahviour? 
File file = DataRootHandler.getInstance().getCrawlableDatasetAsFile(path); if ((file != null) && file.exists()) return file.lastModified(); return -1; } ///////////////////////////////////////////////////////////////////////////// public void doGet(HttpServletRequest request, HttpServletResponse response) { log.info("doGet(): " + UsageLog.setupRequestContext(request)); // System.out.printf("opendap doGet: req=%s%n%s%n", ServletUtil.getRequest(request), // ServletUtil.showRequestDetail(this, request)); String path = null; ReqState rs = getRequestState(request, response); try { path = request.getPathInfo(); log.debug("doGet path={}", path); if (thredds.servlet.Debug.isSet("showRequestDetail")) log.debug(ServletUtil.showRequestDetail(this, request)); if (path == null) { log.info( "doGet(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, -1)); response.sendError(HttpServletResponse.SC_NOT_FOUND); return; } if (baseURI == null) { // first time, set baseURI URI reqURI = ServletUtil.getRequestURI(request); // Build base URI from request (rather than hard-coding "/thredds/dodsC/"). String baseUriString = request.getContextPath() + request.getServletPath() + "/"; baseURI = reqURI.resolve(baseUriString); log.debug("doGet(): baseURI was set = {}", baseURI); } if (path.endsWith("latest.xml")) { DataRootHandler.getInstance().processReqForLatestDataset(this, request, response); return; } // Redirect all catalog requests at the root level. if (path.equals("/") || path.equals("/catalog.html") || path.equals("/catalog.xml")) { ServletUtil.sendPermanentRedirect(ServletUtil.getContextPath() + path, request, response); return; } // Make sure catalog requests match a dataRoot before trying to handle. if (path.endsWith("/") || path.endsWith("/catalog.html") || path.endsWith("/catalog.xml")) { if (!DataRootHandler.getInstance().hasDataRootMatch(path)) { log.info( "doGet(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, -1)); response.sendError(HttpServletResponse.SC_NOT_FOUND); return; } if (!DataRootHandler.getInstance().processReqForCatalog(request, response)) log.error( "doGet(): " + UsageLog.closingMessageForRequestContext( ServletUtil.STATUS_FORWARD_FAILURE, -1)); return; } if (rs != null) { String dataSet = rs.getDataSet(); String requestSuffix = rs.getRequestSuffix(); if ((dataSet == null) || dataSet.equals("/") || dataSet.equals("")) { doGetDIR(rs); } else if (requestSuffix.equalsIgnoreCase("blob")) { doGetBLOB(rs); } else if (requestSuffix.equalsIgnoreCase("close")) { doClose(rs); } else if (requestSuffix.equalsIgnoreCase("dds")) { doGetDDS(rs); } else if (requestSuffix.equalsIgnoreCase("das")) { doGetDAS(rs); } else if (requestSuffix.equalsIgnoreCase("ddx")) { doGetDDX(rs); } else if (requestSuffix.equalsIgnoreCase("dods")) { doGetDAP2Data(rs); } else if (requestSuffix.equalsIgnoreCase("asc") || requestSuffix.equalsIgnoreCase("ascii")) { doGetASC(rs); } else if (requestSuffix.equalsIgnoreCase("info")) { doGetINFO(rs); } else if (requestSuffix.equalsIgnoreCase("html") || requestSuffix.equalsIgnoreCase("htm")) { doGetHTML(rs); } else if (requestSuffix.equalsIgnoreCase("ver") || requestSuffix.equalsIgnoreCase("version") || dataSet.equalsIgnoreCase("/version") || dataSet.equalsIgnoreCase("/version/")) { doGetVER(rs); } else if (dataSet.equalsIgnoreCase("/help") || dataSet.equalsIgnoreCase("/help/") || dataSet.equalsIgnoreCase("/" + requestSuffix) || requestSuffix.equalsIgnoreCase("help")) { doGetHELP(rs); } else { sendErrorResponse(response, 
HttpServletResponse.SC_BAD_REQUEST, "Unrecognized request"); return; } } else { sendErrorResponse(response, HttpServletResponse.SC_BAD_REQUEST, "Unrecognized request"); return; } log.info(UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_OK, -1)); // plain ol' 404 } catch (FileNotFoundException e) { sendErrorResponse(response, HttpServletResponse.SC_NOT_FOUND, e.getMessage()); // DAP2Exception bad url } catch (BadURLException e) { log.info(UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_BAD_REQUEST, -1)); response.setStatus(HttpServletResponse.SC_BAD_REQUEST); dap2ExceptionHandler(e, rs); // all other DAP2Exception } catch (DAP2Exception de) { int status = (de.getErrorCode() == DAP2Exception.NO_SUCH_FILE) ? HttpServletResponse.SC_NOT_FOUND : HttpServletResponse.SC_BAD_REQUEST; if ((de.getErrorCode() != DAP2Exception.NO_SUCH_FILE) && (de.getErrorMessage() != null)) log.debug(de.getErrorMessage()); log.info(UsageLog.closingMessageForRequestContext(status, -1)); response.setStatus(status); dap2ExceptionHandler(de, rs); // parsing, usually the CE } catch (ParseException pe) { log.info(UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_BAD_REQUEST, -1)); response.setStatus(HttpServletResponse.SC_BAD_REQUEST); parseExceptionHandler(pe, response); // 403 - request too big } catch (UnsupportedOperationException e) { sendErrorResponse(response, HttpServletResponse.SC_FORBIDDEN, e.getMessage()); } catch (java.net.SocketException e) { log.info("SocketException: " + e.getMessage(), e); log.info(UsageLog.closingMessageForRequestContext(ServletUtil.STATUS_CLIENT_ABORT, -1)); } catch (IOException e) { String eName = e.getClass().getName(); // dont want compile time dependency on ClientAbortException if (eName.equals("org.apache.catalina.connector.ClientAbortException")) { log.debug("ClientAbortException: " + e.getMessage()); log.info(UsageLog.closingMessageForRequestContext(ServletUtil.STATUS_CLIENT_ABORT, -1)); return; } log.error("path= " + path, e); sendErrorResponse(response, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage()); // everything else } catch (Throwable t) { log.error("path= " + path, t); t.printStackTrace(); sendErrorResponse(response, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, t.getMessage()); } } public void doGetASC(ReqState rs) throws Exception { HttpServletResponse response = rs.getResponse(); GuardedDataset ds = null; try { ds = getDataset(rs); if (ds == null) return; response.setHeader("XDODS-Server", getServerVersion()); response.setContentType("text/plain"); response.setHeader("Content-Description", "dods-ascii"); log.debug( "Sending OPeNDAP ASCII Data For: " + rs + " CE: '" + rs.getConstraintExpression() + "'"); ServerDDS dds = ds.getDDS(); CEEvaluator ce = new CEEvaluator(dds); ce.parseConstraint(rs); checkSize(dds, true); PrintWriter pw = new PrintWriter(response.getOutputStream()); dds.printConstrained(pw); pw.println("---------------------------------------------"); AsciiWriter writer = new AsciiWriter(); // could be static writer.toASCII(pw, dds, ds); // the way that getDAP2Data works // DataOutputStream sink = new DataOutputStream(bOut); // ce.send(myDDS.getName(), sink, ds); pw.flush(); } finally { // release lock if needed if (ds != null) ds.release(); } } public void doGetDAS(ReqState rs) throws Exception { HttpServletResponse response = rs.getResponse(); GuardedDataset ds = null; try { ds = getDataset(rs); if (ds == null) return; response.setContentType("text/plain"); response.setHeader("XDODS-Server", 
getServerVersion()); response.setHeader("Content-Description", "dods-das"); OutputStream Out = new BufferedOutputStream(response.getOutputStream()); DAS myDAS = ds.getDAS(); myDAS.print(Out); } finally { // release lock if needed if (ds != null) ds.release(); } } public void doGetDDS(ReqState rs) throws Exception { HttpServletResponse response = rs.getResponse(); GuardedDataset ds = null; try { ds = getDataset(rs); if (null == ds) return; response.setContentType("text/plain"); response.setHeader("XDODS-Server", getServerVersion()); response.setHeader("Content-Description", "dods-dds"); OutputStream out = new BufferedOutputStream(response.getOutputStream()); ServerDDS myDDS = ds.getDDS(); if (rs.getConstraintExpression().equals("")) { // No Constraint Expression? // Send the whole DDS myDDS.print(out); out.flush(); } else { // Otherwise, send the constrained DDS // Instantiate the CEEvaluator and parse the constraint expression CEEvaluator ce = new CEEvaluator(myDDS); ce.parseConstraint(rs); // Send the constrained DDS back to the client PrintWriter pw = new PrintWriter(new OutputStreamWriter(out)); myDDS.printConstrained(pw); pw.flush(); } } finally { // release lock if needed if (ds != null) ds.release(); } } public void doGetDDX(ReqState rs) throws Exception { HttpServletResponse response = rs.getResponse(); GuardedDataset ds = null; try { ds = getDataset(rs); if (null == ds) return; response.setContentType("text/plain"); response.setHeader("XDODS-Server", getServerVersion()); response.setHeader("Content-Description", "dods-ddx"); OutputStream out = new BufferedOutputStream(response.getOutputStream()); ServerDDS myDDS = ds.getDDS(); myDDS.ingestDAS(ds.getDAS()); if (rs.getConstraintExpression().equals("")) { // No Constraint Expression? // Send the whole DDS myDDS.printXML(out); out.flush(); } else { // Otherwise, send the constrained DDS // Instantiate the CEEvaluator and parse the constraint expression CEEvaluator ce = new CEEvaluator(myDDS); ce.parseConstraint(rs); // Send the constrained DDS back to the client PrintWriter pw = new PrintWriter(new OutputStreamWriter(out)); myDDS.printConstrainedXML(pw); pw.flush(); } } finally { // release lock if needed if (ds != null) ds.release(); } } public void doGetBLOB(ReqState rs) throws Exception { HttpServletResponse response = rs.getResponse(); GuardedDataset ds = null; try { ds = getDataset(rs); if (null == ds) return; response.setContentType("application/octet-stream"); response.setHeader("XDODS-Server", getServerVersion()); response.setHeader("Content-Description", "dods-blob"); ServletOutputStream sOut = response.getOutputStream(); OutputStream bOut; DeflaterOutputStream dOut = null; if (rs.getAcceptsCompressed() && allowDeflate) { response.setHeader("Content-Encoding", "deflate"); dOut = new DeflaterOutputStream(sOut); bOut = new BufferedOutputStream(dOut); } else { bOut = new BufferedOutputStream(sOut); } ServerDDS myDDS = ds.getDDS(); CEEvaluator ce = new CEEvaluator(myDDS); ce.parseConstraint(rs); checkSize(myDDS, false); // Send the binary data back to the client DataOutputStream sink = new DataOutputStream(bOut); ce.send(myDDS.getEncodedName(), sink, ds); sink.flush(); // Finish up sending the compressed stuff, but don't // close the stream (who knows what the Servlet may expect!) 
if (null != dOut) dOut.finish(); bOut.flush(); } finally { // release lock if needed if (ds != null) ds.release(); } } private void doClose(ReqState rs) throws Exception { HttpServletResponse response = rs.getResponse(); HttpServletRequest request = rs.getRequest(); String reqPath = rs.getDataSet(); HttpSession session = request.getSession(); session.removeAttribute(reqPath); // work done in the listener response.setHeader("XDODS-Server", getServerVersion()); // needed by client /* if (path.endsWith(".close")) { closeSession(request, response); response.setContentLength(0); log.info("doGet(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_OK, 0)); return; } // so we need to worry about deleting sessions? session.invalidate(); */ } public void doGetDAP2Data(ReqState rs) throws Exception { HttpServletResponse response = rs.getResponse(); GuardedDataset ds = null; try { ds = getDataset(rs); if (null == ds) return; response.setContentType("application/octet-stream"); response.setHeader("XDODS-Server", getServerVersion()); response.setHeader("Content-Description", "dods-data"); ServletOutputStream sOut = response.getOutputStream(); OutputStream bOut; DeflaterOutputStream dOut = null; if (rs.getAcceptsCompressed() && allowDeflate) { response.setHeader("Content-Encoding", "deflate"); dOut = new DeflaterOutputStream(sOut); bOut = new BufferedOutputStream(dOut); } else { bOut = new BufferedOutputStream(sOut); } ServerDDS myDDS = ds.getDDS(); CEEvaluator ce = new CEEvaluator(myDDS); ce.parseConstraint(rs); checkSize(myDDS, false); // Send the constrained DDS back to the client PrintWriter pw = new PrintWriter(new OutputStreamWriter(bOut)); myDDS.printConstrained(pw); // Send the Data delimiter back to the client pw.flush(); bOut.write("\nData:\n".getBytes()); bOut.flush(); // Send the binary data back to the client DataOutputStream sink = new DataOutputStream(bOut); ce.send(myDDS.getEncodedName(), sink, ds); sink.flush(); // Finish up sending the compressed stuff, but don't // close the stream (who knows what the Servlet may expect!) 
if (null != dOut) dOut.finish(); bOut.flush(); } finally { // release lock if needed if (ds != null) ds.release(); } } public void doGetVER(ReqState rs) throws Exception { HttpServletResponse response = rs.getResponse(); response.setContentType("text/plain"); response.setHeader("XDODS-Server", getServerVersion()); response.setHeader("Content-Description", "dods-version"); PrintWriter pw = new PrintWriter(new OutputStreamWriter(response.getOutputStream())); pw.println("Server Version: " + getServerVersion()); pw.flush(); } public void doGetHELP(ReqState rs) throws Exception { HttpServletResponse response = rs.getResponse(); response.setContentType("text/html"); response.setHeader("XDODS-Server", getServerVersion()); response.setHeader("Content-Description", "dods-help"); PrintWriter pw = new PrintWriter(new OutputStreamWriter(response.getOutputStream())); printHelpPage(pw); pw.flush(); } public void doGetDIR(ReqState rs) throws Exception { // rather dangerous here, since you can go into an infinite loop // so we're going to insist that there's no suffix HttpServletResponse response = rs.getResponse(); HttpServletRequest request = rs.getRequest(); if ((rs.getRequestSuffix() == null) || (rs.getRequestSuffix().length() == 0)) { ServletUtil.forwardToCatalogServices(request, response); return; } sendErrorResponse(response, 0, "Unrecognized request"); } public void doGetINFO(ReqState rs) throws Exception { HttpServletResponse response = rs.getResponse(); GuardedDataset ds = null; try { ds = getDataset(rs); if (null == ds) return; PrintStream pw = new PrintStream(response.getOutputStream()); response.setHeader("XDODS-Server", getServerVersion()); response.setContentType("text/html"); response.setHeader("Content-Description", "dods-description"); GetInfoHandler di = new GetInfoHandler(); di.sendINFO(pw, ds, rs); } finally { // release lock if needed if (ds != null) ds.release(); } } public void doGetHTML(ReqState rs) throws Exception { HttpServletResponse response = rs.getResponse(); HttpServletRequest request = rs.getRequest(); GuardedDataset ds = null; try { ds = getDataset(rs); if (ds == null) return; response.setHeader("XDODS-Server", getServerVersion()); response.setContentType("text/html"); response.setHeader("Content-Description", "dods-form"); // Utilize the getDDS() method to get a parsed and populated DDS // for this server. 
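// The DAS is retrieved as well so the generated HTML form can show attribute metadata alongside the dataset structure.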
ServerDDS myDDS = ds.getDDS(); DAS das = ds.getDAS(); GetHTMLInterfaceHandler2 di = new GetHTMLInterfaceHandler2(); di.sendDataRequestForm(request, response, rs.getDataSet(), myDDS, das); } finally { // release lock if needed if (ds != null) ds.release(); } } /////////////////////////////////////////////////////////////////////////////////////////////// // debugging private void makeDebugActions() { DebugHandler debugHandler = DebugHandler.get("ncdodsServer"); DebugHandler.Action act; act = new DebugHandler.Action("help", "Show help page") { public void doAction(DebugHandler.Event e) { try { doGetHELP(getRequestState(e.req, e.res)); } catch (Exception ioe) { log.error("ShowHelp", ioe); } } }; debugHandler.addAction(act); act = new DebugHandler.Action("version", "Show server version") { public void doAction(DebugHandler.Event e) { e.pw.println(" version= " + getServerVersion()); } }; debugHandler.addAction(act); } public String getServerName() { return this.getClass().getName(); } /*protected ReqState getRequestState(HttpServletRequest request, HttpServletResponse response) { ReqState rs = null; // The url and query strings will come to us in encoded form // (see HTTPmethod.newMethod()) String baseurl = request.getRequestURL().toString(); baseurl = EscapeStrings.escapeURL(baseurl); log.debug("doGet baseurl={}", baseurl); String query = request.getQueryString(); query = EscapeStrings.unescapeURLQuery(query); log.debug("doGet query={}", query); try { rs = new ReqState(request, response, getServletConfig(), getServerName(), baseurl, query); } catch (BadURLException bue) { rs = null; } return rs; }*/ /** * ************************************************************************ Prints the OPeNDAP * Server help page to the passed PrintWriter * * @param pw PrintWriter stream to which to dump the help page. */ private void printHelpPage(PrintWriter pw) { pw.println("<h3>OPeNDAP Server Help</h3>"); pw.println("To access most of the features of this OPeNDAP server, append"); pw.println( "one of the following a eight suffixes to a URL: .das, .dds, .dods, .ddx, .blob, .info,"); pw.println(".ver or .help. Using these suffixes, you can ask this server for:"); pw.println("<dl>"); pw.println("<dt> das </dt> <dd> Dataset Attribute Structure (DAS)</dd>"); pw.println("<dt> dds </dt> <dd> Dataset Descriptor Structure (DDS)</dd>"); pw.println("<dt> dods </dt> <dd> DataDDS object (A constrained DDS populated with data)</dd>"); pw.println("<dt> ddx </dt> <dd> XML version of the DDS/DAS</dd>"); pw.println( "<dt> blob </dt> <dd> Serialized binary data content for requested data set, " + "with the constraint expression applied.</dd>"); pw.println("<dt> info </dt> <dd> info object (attributes, types and other information)</dd>"); pw.println("<dt> html </dt> <dd> html form for this dataset</dd>"); pw.println("<dt> ver </dt> <dd> return the version number of the server</dd>"); pw.println("<dt> help </dt> <dd> help information (this text)</dd>"); pw.println("</dl>"); pw.println("For example, to request the DAS object from the FNOC1 dataset at URI/GSO (a"); pw.println("test dataset) you would appand `.das' to the URL:"); pw.println("http://opendap.gso.url.edu/cgi-bin/nph-nc/data/fnoc1.nc.das."); pw.println("<p><b>Note</b>: Many OPeNDAP clients supply these extensions for you so you don't"); pw.println("need to append them (for example when using interfaces supplied by us or"); pw.println("software re-linked with a OPeNDAP client-library). 
Generally, you only need to"); pw.println("add these if you are typing a URL directly into a WWW browser."); pw.println("<p><b>Note</b>: If you would like version information for this server but"); pw.println("don't know a specific data file or data set name, use `/version' for the"); pw.println("filename. For example: http://opendap.gso.url.edu/cgi-bin/nph-nc/version will"); pw.println("return the version number for the netCDF server used in the first example. "); pw.println("<p><b>Suggestion</b>: If you're typing this URL into a WWW browser and"); pw.println("would like information about the dataset, use the `.info' extension."); pw.println("<p>If you'd like to see the data values, use the `.html' extension and submit a"); pw.println("query using the customized form."); } // ************************************************************************** /** * ************************************************************************ Prints the Bad URL * page to the passed PrintWriter * * @param pw PrintWriter stream to which to dump the bad URL page. */ private void printBadURLPage(PrintWriter pw) { String serverContactName = ThreddsConfig.get("serverInformation.contact.name", "UNKNOWN"); String serverContactEmail = ThreddsConfig.get("serverInformation.contact.email", "UNKNOWN"); pw.println("<h3>Error in URL</h3>"); pw.println("The URL extension did not match any that are known by this"); pw.println("server. Below is a list of the five extensions that are recognized by"); pw.println("all OPeNDAP servers. If you think that the server is broken (that the URL you"); pw.println("submitted should have worked), then please contact the"); pw.println("administrator of this server [" + serverContactName + "] at: "); pw.println("<a href='mailto:" + serverContactEmail + "'>" + serverContactEmail + "</a><p>"); } /////////////////////////////////////////////////////// // utils /** * @param request * @param response * @return the request state */ protected ReqState getRequestState(HttpServletRequest request, HttpServletResponse response) { ReqState rs = null; // Assume url was encoded String baseurl = request.getRequestURL().toString(); baseurl = EscapeStrings.unescapeURL(baseurl); // Assume query was encoded String query = request.getQueryString(); query = EscapeStrings.unescapeURLQuery(query); log.info(String.format("OpendapServlet: nominal url: %s?%s", baseurl, query)); try { rs = new ReqState(request, response, this, getServerName(), baseurl, query); } catch (Exception bue) { rs = null; } return rs; } private void checkSize(ServerDDS dds, boolean isAscii) throws Exception { long size = computeSize(dds, isAscii); // System.err.printf("total (constrained) size=%s\n", size); log.debug("total (constrained) size={}", size); double dsize = size / (1000.0 * 1000.0); // floating-point division, so sizes under 1 Mbyte are not truncated to zero double maxSize = isAscii ?
ascLimit : binLimit; // Mbytes if (dsize > maxSize) { log.info("Reject request size = {} Mbytes", dsize); throw new UnsupportedOperationException( "Request too big=" + dsize + " Mbytes, max=" + maxSize); } } // Recursively compute size of the dds to be returned private long computeSize(DConstructor ctor, boolean isAscii) throws Exception { long projectsize = 0; // accumulate size of projected variables long othersize = 0; // accumulate size of non-projected variables long fieldsize = 0; int projectedcount = 0; int fieldcount = 0; Enumeration vars = ctor.getVariables(); while (vars.hasMoreElements()) { fieldcount++; BaseType field = (BaseType) vars.nextElement(); fieldsize = computeFieldSize(field, isAscii); // accumulate the field sizes if (field.isProject()) { projectsize += fieldsize; projectedcount++; } else { othersize += fieldsize; } } // Cases to consider: // 1. If all of the fields of this ctor are projected, // then return projectsize // 2. If none of the fields of this ctor are projected, // then return othersize // 3. otherwise, at least one field, but not all, is projected, // => return projectsize; if (projectedcount == fieldcount) return projectsize; else if (projectedcount == 0) return othersize; else { assert (projectedcount > 0 && projectedcount < fieldcount); return projectsize; } } long computeFieldSize(BaseType bt, boolean isAscii) throws Exception { long fieldsize = 0; // Figure out what this field is (e.g. primitive or not) // Somewhat convoluted. if (bt instanceof DConstructor) { // simple struct, seq, or grid => recurse fieldsize = computeSize((DConstructor) bt, isAscii); } else if (bt instanceof DArray) { SDArray da = (SDArray) bt; // Separate structure arrays from primitive arrays if (da.getContainerVar() instanceof DPrimitive) { fieldsize = computeArraySize(da); } else if (da.getContainerVar() instanceof DStructure) { fieldsize = computeSize((DStructure) da.getContainerVar(), isAscii); // recurse } else { // Some kind of problem throw new NoSuchTypeException("Computesize: unexpected type for " + bt.getLongName()); } } else if (bt instanceof DPrimitive) { DPrimitive dp = (DPrimitive) bt; if (dp instanceof DString) { String v = ((DString) dp).getValue(); fieldsize = (v == null ? 0 : v.length()); } else { DataType dtype = DODSNetcdfFile.convertToNCType(bt); fieldsize = dtype.getSize(); } } else { // Some kind of problem throw new NoSuchTypeException("Computesize: unknown type for " + bt.getLongName()); } return fieldsize; } long computeArraySize(SDArray da) throws Exception { assert (da.getContainerVar() instanceof DPrimitive); BaseType base = da.getPrimitiveVector().getTemplate(); DataType dtype = DODSNetcdfFile.convertToNCType(base); int elemSize = dtype.getSize(); int n = da.numDimensions(); List<Range> ranges = new ArrayList<Range>(n); long size = 0; for (int i = 0; i < n; i++) { ranges.add(new Range(da.getStart(i), da.getStop(i), da.getStride(i))); Section s = new Section(ranges); size += s.computeSize() * elemSize; } return size; } /* * *********************** dataset caching *********************************************** */ // any time the server needs access to the dataset, it gets a "GuardedDataset" which allows us to // add caching // optionally, a session may be established, which allows us to reserve the dataset for that // session. 
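// In outline: if the client sent X-Accept-Session and sessions are allowed, look for a GuardedDataset already stored in the HttpSession; otherwise open the NetcdfFile via DatasetHandler, wrap it in a GuardedDatasetCacheAndClone, and (when a session is in use) register it under the request path so later requests can reuse it.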
protected GuardedDataset getDataset(ReqState preq) throws Exception { HttpServletRequest req = preq.getRequest(); String reqPath = preq.getDataSet(); // see if the client wants sessions boolean acceptSession = false; String s = req.getHeader("X-Accept-Session"); if (s != null && s.equalsIgnoreCase("true") && allowSessions) acceptSession = true; HttpSession session = null; if (acceptSession) { // see if theres already a session established, create one if not session = req.getSession(); if (!session.isNew()) { GuardedDataset gdataset = (GuardedDataset) session.getAttribute(reqPath); if (null != gdataset) { if (debugSession) System.out.printf(" found gdataset %s in session %s %n", reqPath, session.getId()); if (log.isDebugEnabled()) log.debug(" found gdataset " + gdataset + " in session " + session.getId()); return gdataset; } } } NetcdfFile ncd = DatasetHandler.getNetcdfFile(req, preq.getResponse(), reqPath); if (null == ncd) return null; GuardedDataset gdataset = new GuardedDatasetCacheAndClone(reqPath, ncd, acceptSession); // GuardedDataset gdataset = new GuardedDatasetImpl(reqPath, ncd, acceptSession); if (acceptSession) { String cookiePath = req.getRequestURI(); String suffix = "." + preq.getRequestSuffix(); if (cookiePath.endsWith(suffix)) // snip off the suffix cookiePath = cookiePath.substring(0, cookiePath.length() - suffix.length()); session.setAttribute(reqPath, gdataset); session.setAttribute(CookieFilter.SESSION_PATH, cookiePath); // session.setAttribute("dataset", ncd.getLocation()); // for UsageValve // session.setMaxInactiveInterval(30); // 30 second timeout !! if (debugSession) System.out.printf( " added gdataset %s in session %s cookiePath %s %n", reqPath, session.getId(), cookiePath); if (log.isDebugEnabled()) log.debug(" added gdataset " + gdataset + " in session " + session.getId()); } /* else { session = req.getSession(); session.setAttribute("dataset", ncd.getLocation()); // for UsageValve } */ return gdataset; } ////////////////////////////////////////////////////////////////////////////// public void parseExceptionHandler(ParseException pe, HttpServletResponse response) { try { BufferedOutputStream eOut = new BufferedOutputStream(response.getOutputStream()); response.setHeader("Content-Description", "dods-error"); response.setContentType("text/plain"); String msg = pe.getMessage().replace('\"', '\''); DAP2Exception de2 = new DAP2Exception(opendap.dap.DAP2Exception.CANNOT_READ_FILE, msg); de2.print(eOut); } catch (Exception e) { System.err.println("parseExceptionHandler: " + e); } } public void dap2ExceptionHandler(DAP2Exception de, ReqState rs) { rs.getResponse().setHeader("Content-Description", "dods-error"); rs.getResponse().setContentType("text/plain"); try { de.print(rs.getResponse().getOutputStream()); } catch (Exception e) { System.err.println("dap2ExceptionHandler: " + e); } } private void sendErrorResponse(HttpServletResponse response, int errorCode, String errorMessage) { try { log.info(UsageLog.closingMessageForRequestContext(errorCode, -1)); response.setStatus(errorCode); response.setHeader("Content-Description", "dods-error"); response.setContentType("text/plain"); PrintWriter pw = new PrintWriter(response.getOutputStream()); pw.println("Error {"); pw.println(" code = " + errorCode + ";"); pw.println(" message = \"" + errorMessage + "\";"); pw.println("};"); pw.flush(); } catch (Exception e) { System.err.println("sendErrorResponse: " + e); } } }
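/*
 * Illustrative client-side sketch (not part of the servlet above): it exercises the URL suffixes
 * the help page documents (.das, .dds, .ver, .dods) against a running server. The host, dataset
 * path and constraint variable below are placeholders, and the Accept-Encoding header is simply
 * the conventional way a DAP client asks for the deflate encoding that doGetDAP2Data applies when
 * allowDeflate is enabled; only java.net and java.util.zip are used.
 */
public class OpendapSuffixClientSketch {
  public static void main(String[] args) throws Exception {
    String base = "http://localhost:8080/thredds/dodsC/testdata/example.nc"; // placeholder URL

    // Text responses: first line of each, plus the headers the servlet sets explicitly.
    for (String suffix : new String[] {".das", ".dds", ".ver"}) {
      java.net.HttpURLConnection conn =
          (java.net.HttpURLConnection) new java.net.URL(base + suffix).openConnection();
      System.out.println(suffix + " -> HTTP " + conn.getResponseCode()
          + ", XDODS-Server=" + conn.getHeaderField("XDODS-Server")
          + ", Content-Description=" + conn.getHeaderField("Content-Description"));
      try (java.io.BufferedReader r =
          new java.io.BufferedReader(new java.io.InputStreamReader(conn.getInputStream()))) {
        System.out.println("  " + r.readLine());
      }
    }

    // Binary .dods response with a constraint expression ("time" is a placeholder variable name).
    java.net.HttpURLConnection conn =
        (java.net.HttpURLConnection) new java.net.URL(base + ".dods?time").openConnection();
    conn.setRequestProperty("Accept-Encoding", "deflate");
    java.io.InputStream in = conn.getInputStream();
    if ("deflate".equals(conn.getHeaderField("Content-Encoding"))) {
      in = new java.util.zip.InflaterInputStream(in); // matches the server's DeflaterOutputStream
    }
    long total = 0;
    byte[] buf = new byte[8192];
    for (int n; (n = in.read(buf)) > 0; ) total += n;
    in.close();
    System.out.println(".dods bytes received: " + total);
  }
}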
/** * A class containing static methods which deliver descriptions and names of parameters, levels and * units for byte codes from GRIB records. * * <p>Performs operations related to loading parameter tables stored in files. Through a lookup * table (see readParameterTableLookup) all of the supported Parameter Tables are known. An actual * table is not loaded until a parameter from that center/subcenter/table is loaded. see <a * href="../../../Parameters.txt">Parameters.txt</a> * * <p>For now, the lookup table name is hard coded to "resources/grib/tables/tablelookup.lst" * * @author Capt Richard D. Gonzalez modified by Robb Kambic threadsafe 9/25/08 jcaron see * http://www.ibm.com/developerworks/java/library/j-hashmap.html */ public final class GribPDSParamTable { private static org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(GribPDSParamTable.class); private static final String RESOURCE_PATH = "resources/grib/tables"; private static final String TABLE_LIST = "tablelookup.lst"; private static final Pattern valid = Pattern.compile("^[a-zA-Z_][a-zA-Z0-9_@:\\.\\-\\+]*$"); private static final Pattern numberFirst = Pattern.compile("^[0-9]"); /** * Added by Richard D. Gonzalez static Array with parameter tables used by the GRIB file (should * only be one, but not actually limited to that - this allows GRIB files to be read that have * more than one center's information in it) */ private static volatile GribPDSParamTable[] paramTables = null; private static Object lock = new Object(); private static boolean debug = false; private static int wmoTable; /** * This is a mapping from (center,subcenter,number)-> Param table for any data that has been * loaded */ private static Map<String, GribPDSParamTable> tableMap = new ConcurrentHashMap<String, GribPDSParamTable>(); static { try { ArrayList<GribPDSParamTable> tables = new ArrayList<GribPDSParamTable>(); String resourceName = RESOURCE_PATH + "/" + TABLE_LIST; readTableEntries(resourceName, tables); paramTables = (GribPDSParamTable[]) tables.toArray(new GribPDSParamTable[tables.size()]); } catch (IOException ioe) { throw new RuntimeException(ioe); } } /** * _more_ * * @param aTableList _more_ * @param aTables _more_ * @return Was read successful * @throws IOException On badness */ private static boolean readTableEntries(String aTableList, ArrayList<GribPDSParamTable> aTables) throws IOException { InputStream inputStream = GribResourceReader.getInputStream(aTableList); if (inputStream == null) { logger.debug("Could not open table file:" + aTableList); return false; } return readTableEntries(inputStream, aTableList, aTables); } /** * Read the table list contained in the input stream * * @param is The input stream * @param aTableList The name of the table list file * @param aTables The list to add the tables into * @return Was successful * @throws IOException On badness */ private static boolean readTableEntries( InputStream is, String aTableList, ArrayList<GribPDSParamTable> aTables) throws IOException { if (is == null) return false; InputStreamReader isr = new InputStreamReader(is); BufferedReader br = new BufferedReader(isr); String line; while ((line = br.readLine()) != null) { line = line.trim(); if ((line.length() == 0) || line.startsWith("#")) { continue; } GribPDSParamTable table = new GribPDSParamTable(); String[] tableDefArr = line.split(":"); table.center_id = Integer.parseInt(tableDefArr[0].trim()); table.subcenter_id = Integer.parseInt(tableDefArr[1].trim()); table.table_number = Integer.parseInt(tableDefArr[2].trim()); 
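// Each non-comment line of the lookup list is "center:subcenter:table_number:filename"; the filename, read next, may be absolute, a file:/http:// URL, or a path relative to the lookup list's own directory.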
table.filename = tableDefArr[3].trim(); if (table.filename.startsWith("/") || table.filename.startsWith("\\") || table.filename.startsWith("file:") || table.filename.startsWith("http://")) { table.path = table.filename; } else if (aTableList != null) { table.path = GribResourceReader.getFileRoot(aTableList); if (table.path.equals(aTableList)) { table.path = table.filename; } else { table.path = table.path + "/" + table.filename; } table.path = table.path.replace('\\', '/'); } aTables.add(table); } is.close(); return true; } public static GribPDSParamTable[] getParameterTables() { return paramTables; } /** * Reads in the list of tables available and stores them. Does not actually open the parameter * tables files, nor store the list of parameters, but just stores the file names of the parameter * tables. Parameters for a table are read in when the table is requested (in the * getParameterTable method). * * @param is UserGribTabList as a InputStream * @throws IOException or read error */ public static void addParameterUserLookup(InputStream is) throws IOException { // leave out of lock since it does IO ArrayList<GribPDSParamTable> tables = new ArrayList<GribPDSParamTable>(); if (!readTableEntries(is, null, tables)) { return; } synchronized (lock) { // tmp table stores new user defined tables plus tablelookup.lst table entries GribPDSParamTable[] tmp = new GribPDSParamTable[paramTables.length + tables.size()]; for (int idx = 0; idx < paramTables.length + tables.size(); idx++) { if (idx < tables.size()) { tmp[idx] = (GribPDSParamTable) tables.get(idx); // System.out.println( "usrlookup tables = " + tmp[ idx ].path ); } else { tmp[idx] = paramTables[idx - tables.size()]; // tablelookup.lst entries } } paramTables = tmp; // new copy of the data structure } } /** * Reads in the list of tables available and stores them. Does not actually open the parameter * tables files, nor store the list of parameters, but just stores the file names of the parameter * tables. Parameters for a table are read in when the table is requested (in the * getParameterTable method). * * @param userGribTabList string of userlookup file * @throws IOException if file found but read error * @return true if read ok, false if file not found */ public static boolean addParameterUserLookup(String userGribTabList) throws IOException { // leave out of lock since it does IO ArrayList<GribPDSParamTable> tables = new ArrayList<GribPDSParamTable>(); if (!readTableEntries(userGribTabList, tables)) { // logger.error("could not read:" + userGribTabList); return false; } synchronized (lock) { // tmp table stores new user defined tables plus tablelookup.lst table entries GribPDSParamTable[] tmp = new GribPDSParamTable[paramTables.length + tables.size()]; for (int idx = 0; idx < paramTables.length + tables.size(); idx++) { if (idx < tables.size()) { tmp[idx] = tables.get(idx); // new stuff first } else { tmp[idx] = paramTables[idx - tables.size()]; // old stuff } } paramTables = tmp; // new copy of the data structure } return true; } /** * Looks for the parameter table which matches the center, subcenter and table version from the * tables array. If this is the first time asking for this table, then the parameters for this * table have not been read in yet, so this is done as well. * * @param center - integer from PDS octet 5, representing Center. 
* @param subcenter - integer from PDS octet 26, representing Subcenter * @param tableVersion - integer from PDS octet 4, representing Parameter Table Version * @return GribPDSParamTable matching center, subcenter, and number * @throws NotSupportedException no table found */ public static GribPDSParamTable getParameterTable(int center, int subcenter, int tableVersion) throws NotSupportedException { String key = center + "_" + subcenter + "_" + tableVersion; if (center == -1) { // non existent table logger.error("GribPDSParamTable: non existent table for center, subcenter, table = " + key); return null; } GribPDSParamTable table = tableMap.get(key); if (table != null) return table; table = readParameterTable(center, subcenter, tableVersion, true); if (table == null) { logger.error("GribPDSParamTable: cannot find table for center, subcenter, table " + key); throw new NotSupportedException( "Could not find a table entry for GRIB file with center: " + center + " subCenter: " + subcenter + " number: " + tableVersion); } tableMap.put(key, table); return table; } /** * Looks for the parameter table which matches the center, subcenter and table version from the * tables array. If this is the first time asking for this table, then the parameters for this * table have not been read in yet, so this is done as well. * * @param center - integer from PDS octet 5, representing Center. * @param subcenter - integer from PDS octet 26, representing Subcenter * @param number - integer from PDS octet 4, representing Parameter Table Version * @param firstTry - Is this the first call or are we trying the wild cards * @return GribPDSParamTable matching center, subcenter, and number */ private static GribPDSParamTable readParameterTable( int center, int subcenter, int number, boolean firstTry) { if (firstTry) wmoTable = number; GribPDSParamTable[] localCopy = paramTables; // thread safe for (GribPDSParamTable table : localCopy) { if (center == table.center_id) { if ((table.subcenter_id == -1) || (subcenter == table.subcenter_id)) { if (number == table.table_number) { // now that this table is being used, check to see if the // parameters for this table have been read in yet. // If not, initialize table and read them in now. 
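// Parameter tables are read lazily on first use; once parsed, the map is also assigned to every other entry in paramTables that shares the same file path, so each file is only read once.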
if (table.parameters == null) { if (!firstTry) { logger.warn( "GribPDSParamTable: Using default table:" + table.path + " (" + table.center_id + ":" + table.subcenter_id + ":" + table.table_number + ")"); } table.readParameterTable(); if (table.parameters == null) // failed - maybe theres another entry table in paramTables continue; // success - initialize other tables parameters with the same name for (int j = 0; j < paramTables.length; j++) { GribPDSParamTable tab = paramTables[j]; if (tab.path.equals(table.path)) { tab.parameters = table.parameters; } } } return table; } } } } // Try with the wild cards if (number != -1) { return readParameterTable(center, subcenter, -1, false); } else if (subcenter != -1) { logger.warn( "GribPDSParamTable: Could not find table for center:" + center + " subcenter:" + subcenter + " number:" + wmoTable); return readParameterTable(center, -1, -1, false); } else if (center != -1) { // return readParameterTable(-1, -1, -1, false); return readParameterTable(-1, -1, wmoTable, false); } return null; } /** * Munge a description to make it suitable as variable name * * @param description start with this * @return Valid Description */ private static String makeValidDesc(String description) { description = description.replaceAll("\\s+", "_"); if (valid.matcher(description).find()) return description; // else check for special characters if (numberFirst.matcher(description).find()) description = "N" + description; return description.replaceAll("\\)|\\(|=|,|;|\\[|\\]", ""); } ////////////////////////////////////////////////////////////////////////// /** Identification of center e.g. 88 for Oslo */ private int center_id; /** Identification of center defined sub-center - not fully implemented yet. */ private int subcenter_id; /** Identification of parameter table version number. */ private int table_number; /** Stores the name of the file containing this table - not opened unless required for lookup. */ private String filename = null; /** path of filename containing this table. Opened if required for lookup. */ private String path = null; /** Map ids to GridParameter objects */ private Map<String, GridParameter> parameters = null; private GribPDSParamTable() {} public int getCenter_id() { return center_id; } public int getSubcenter_id() { return subcenter_id; } public int getTable_number() { return table_number; } public String getPath() { return path; } public String getFilename() { return filename; } public Map<String, GridParameter> getParameters() { if (parameters == null) readParameterTable(); return parameters; } /** Read parameter table. */ private void readParameterTable() { if (path == null) { logger.error("GribPDSParamTable: unknown path for " + this); return; } try { InputStream is = GribResourceReader.getInputStream(path); if (is == null) { logger.error("GribPDSParamTable: error getInputStream on " + this); return; } BufferedReader br = new BufferedReader(new InputStreamReader(is)); // Read first line that has center, subcenter, and version of table String line = br.readLine(); if (debug) System.out.println(line); String[] tableDefArr = line.split(":"); /* LOOK - why not test values ? center = Integer.parseInt(tableDefArr[1].trim()); subcenter = Integer.parseInt(tableDefArr[2].trim()); number = Integer.parseInt(tableDefArr[3].trim()); if ((center != center_id) && (subcenter != subcenter_id) && (number != table_number)) { throw new java.io.IOException( "parameter table header values do not " + " match values in GRIB file. 
Possible error in lookup table."); } */ HashMap<String, GridParameter> tmpParameters = new HashMap<String, GridParameter>(); // thread safe - temp hash // rdg - added the 0 line length check to cover the case of blank lines at // the end of the parameter table file. while ((line = br.readLine()) != null) { if ((line.length() == 0) || line.startsWith("#")) { continue; } GridParameter parameter = new GridParameter(); tableDefArr = line.split(":"); parameter.setNumber(Integer.parseInt(tableDefArr[0].trim())); parameter.setName(tableDefArr[1].trim()); // check to see if unit defined, if not, parameter is undefined if (tableDefArr[2].indexOf('[') == -1) { // Undefined unit parameter.setDescription(tableDefArr[2].trim()); parameter.setUnit(tableDefArr[2].trim()); } else { String[] arr2 = tableDefArr[2].split("\\["); parameter.setDescription(makeValidDesc(arr2[0].trim())); // System.out.println( "Desc ="+ parameter.getDescription()); // Remove "]" parameter.setUnit(arr2[1].substring(0, arr2[1].lastIndexOf(']')).trim()); } tmpParameters.put(Integer.toString(parameter.getNumber()), parameter); if (debug) System.out.println( parameter.getNumber() + " " + parameter.getDescription() + " " + parameter.getUnit()); } this.parameters = tmpParameters; // thread safe } catch (IOException ioError) { logger.warn( "An error occurred in GribPDSParamTable while trying to open the parameter table " + filename + " : " + ioError); } } /** * Get the parameter with id <tt>id</tt>. * * @param id the parameter id * @return the GridParameter */ public GridParameter getParameter(int id) { GridParameter p = parameters.get(Integer.toString(id)); if (p != null) return p; logger.warn( "GribPDSParamTable: Could not find parameter " + id + " for center:" + center_id + " subcenter:" + subcenter_id + " number:" + table_number + " table " + filename); String unknown = "UnknownParameter_" + Integer.toString(id) + "_table_" + filename; return new GridParameter(id, unknown, unknown, "Unknown"); } @Override public String toString() { return "GribPDSParamTable{" + "center_id=" + center_id + ", subcenter_id=" + subcenter_id + ", table_number=" + table_number + ", filename='" + filename + '\'' + ", path='" + path + '\'' + '}'; } public static void main(String[] args) throws IOException { debug = true; addParameterUserLookup( "C:/dev/tds4.2/thredds/grib/resources/resources/grib/tables/zagreb_221_1.tab"); } }
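/*
 * Illustrative lookup sketch (not part of the class above): register an optional user table list,
 * resolve a (center, subcenter, tableVersion) triple, and fetch one parameter. The numeric ids
 * and the lookup-file path are placeholders; center 7 is NCEP and parameter 11 is temperature in
 * the standard WMO table-2 numbering, but any ids present in the loaded tables would do.
 */
public class GribTableLookupSketch {
  public static void main(String[] args) throws Exception {
    // Optional user-defined tables, layered on top of resources/grib/tables/tablelookup.lst.
    GribPDSParamTable.addParameterUserLookup("/path/to/my_tablelookup.lst"); // placeholder path

    // Resolve the table; wildcard fallbacks (subcenter -1, etc.) are tried internally.
    GribPDSParamTable table = GribPDSParamTable.getParameterTable(7, 0, 2);
    System.out.println("resolved: " + table);

    // Unknown ids come back as an "UnknownParameter_..." placeholder rather than null.
    GridParameter p = table.getParameter(11);
    System.out.println(p.getDescription() + " [" + p.getUnit() + "]");
  }
}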
/** * Created by 4535992 on 28/12/2015. * href:http://www.journaldev.com/2544/java-csv-parserwriter-example-using-opencsv-apache-commons-csv-and-supercsv. */ @SuppressWarnings("unused") public class OpenCsvUtilities extends FileUtilities { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(OpenCsvUtilities.class); /** * Method to write a CSV Data List of Beans to a String. * * @param beans the List of Beans to convert. * @param separator the char separator. * @param <T> the generic variable. * @return the String content of the List of Beans. */ public static <T> String writeCSVDataToStringWithBeans(List<T> beans, char separator) { try { Writer writer = new StringWriter(); try (CSVWriter csvWriter = new CSVWriter(writer, separator)) { List<String[]> data = toStringArray(beans); csvWriter.writeAll(data); } return writer.toString(); } catch (IOException e) { logger.error( "Can't write the CSV String from the Bean:" + beans.get(0).getClass().getName() + " -> " + e.getMessage(), e); return ""; } } /** * Method to write a CSV Data List of Array of String to a String. * * @param content the List of Array of String to convert. * @param separator the char separator. * @return the String content of the List of Beans. */ public static String writeCSVDataToString(List<String[]> content, char separator) { try { Writer writer = new StringWriter(); CSVWriter csvWriter; if (StringUtilities.NULL_CHAR2 == separator) { csvWriter = new CSVWriter(writer, CSVWriter.DEFAULT_SEPARATOR, CSVWriter.NO_QUOTE_CHARACTER); } else { csvWriter = new CSVWriter(writer, separator, CSVWriter.NO_QUOTE_CHARACTER); } csvWriter.writeAll(content); csvWriter.close(); return writer.toString(); } catch (IOException e) { logger.error("Can't write the CSV String -> " + e.getMessage(), e); return ""; } } /** * Method to write a CSV Data List of Array of String to a String. * * @param content the List of Array of String to convert. * @return the String content of the List of Beans. */ public static String writeCSVDataToString(List<String[]> content) { return writeCSVDataToString(content, StringUtilities.NULL_CHAR2); } /** * Method to write a CSV File from List of Array of String. * * @param content the List of Array of String to convert. * @param separator the char separator. * @param fileOutputCsv the output File Csv to create. * @return the File Csv created. */ public static File writeCSVDataToFile( List<String[]> content, char separator, File fileOutputCsv) { try { Writer writer = new FileWriter(fileOutputCsv, true); // the true value make append the result... CSVWriter csvWriter; if (StringUtilities.NULL_CHAR2 == separator) { csvWriter = new CSVWriter(writer, CSVWriter.DEFAULT_SEPARATOR, CSVWriter.NO_QUOTE_CHARACTER); } else { csvWriter = new CSVWriter(writer, separator, CSVWriter.NO_QUOTE_CHARACTER); } csvWriter.writeAll(content); csvWriter.close(); return fileOutputCsv; } catch (IOException e) { logger.error("Can't write the CSV to File:" + fileOutputCsv + " -> " + e.getMessage(), e); return null; } } /** * Method to write a CSV File from List of Array of String. * * @param content the List of Array of String to convert. * @param fileOutputCsv the output File Csv to create. * @return the File Csv created. */ public static File writeCSVDataToFile(List<String[]> content, File fileOutputCsv) { return writeCSVDataToFile(content, StringUtilities.NULL_CHAR2, fileOutputCsv); } /** * Method to write a CSV List of Array of String to System Console.. * * @param content the List of Array of String to convert. 
* @param separator the char separator. */ public static void writeCSVDataToConsole(List<String[]> content, char separator) { try { Writer writer = new OutputStreamWriter(System.out, StringUtilities.UTF_8); CSVWriter csvWriter; if (StringUtilities.NULL_CHAR2 == separator) { csvWriter = new CSVWriter(writer, CSVWriter.DEFAULT_SEPARATOR, CSVWriter.NO_QUOTE_CHARACTER); } else { csvWriter = new CSVWriter(writer, separator, CSVWriter.NO_QUOTE_CHARACTER); } csvWriter.writeAll(content, false); csvWriter.close(); } catch (IOException e) { logger.error("Can't write the CSV to Console -> " + e.getMessage(), e); } } /** * Method to write a CSV List of Array of String to System Console.. * * @param content the List of Array of String to convert. */ public static void writeCSVDataToConsole(List<String[]> content) { writeCSVDataToConsole(content, StringUtilities.NULL_CHAR2); } /** * Method to convert a List of beans to a List of Array of Strings. * * @param beans the List of Beans. * @param <T> generic value. * @return the List of Array of String content of the csv. */ public static <T> List<String[]> toStringArray(List<T> beans) { return toStringArray(beans, null); } /** * Method to convert a List of beans to a List of Array of Strings. * * @param beans the List of Beans. * @param addNewHeader the new String Array for the Header row. * @param <T> generic value. * @return the List of Array of String content of the csv. */ @SuppressWarnings("unchecked") public static <T> List<String[]> toStringArray(List<T> beans, String[] addNewHeader) { List<String[]> records = new ArrayList<>(); // add header record // records.add(new String[]{"ID","Name","Role","Salary"}); if (addNewHeader != null) records.add(addNewHeader); for (T bean : beans) { // beans.stream().map((bean) -> { List<String> record = new ArrayList<>(); // invoke getter method and convert to String Class<T> clazz = (Class<T>) bean.getClass(); // T t = ReflectionUtilities.invokeConstructor(clazz); List<Method> getter = (List<Method>) ReflectionUtilities.findGetters(clazz, true); for (Method method : getter) { record.add(String.valueOf(ReflectionUtilities.invokeGetter(bean, method))); } // getter.stream().forEach((method) -> // record.add(String.valueOf(ReflectionUtilities.invokeGetter(bean,method)))); // return record; // }).forEach((record) -> records.add(ListUtilities.toArray(record))); records.add(ListUtilities.toArray(record)); } return records; } /** * Method use OpenCsv Library for * * @param columnMapping Map allow the user to pass the column Names to a Field Names of the Class. * @param fileInputCsv the File CSV to parse. * @param <T> the generic variable. * @return the List of Bean parsed from the CSV file. 
*/ public static <T> List<T> parseCSVFileAsList( Map<String, String> columnMapping, File fileInputCsv) { try { HeaderColumnNameTranslateMappingStrategy<T> beanStrategy = new HeaderColumnNameTranslateMappingStrategy<>(); // beanStrategy.setType(clazz); //deprecated /*Map<String, String> columnMapping = new HashMap<>(); columnMapping.put("ID", "id"); columnMapping.put("Name", "name"); columnMapping.put("Role", "role");*/ beanStrategy.setColumnMapping(columnMapping); CsvToBean<T> csvToBean = new CsvToBean<>(); CSVReader reader = new CSVReader(new FileReader(fileInputCsv)); return csvToBean.parse(beanStrategy, reader); } catch (IOException e) { logger.error( "Can't parse the CSV file:" + fileInputCsv.getAbsolutePath() + " -> " + e.getMessage(), e); return new ArrayList<>(); } } /** * Method use OpenCsv Library for * * @param clazz the Class of the Bean. * @param fileInputCsv the File CSV to parse. * @param separator the char separator. * @param <T> the generic variable. * @return the List of Bean parsed from the CSV file. */ public static <T> List<T> parseCSVFileAsList(Class<T> clazz, File fileInputCsv, char separator) { try { List<T> beans; try ( // create CSVReader object CSVReader reader = new CSVReader(new FileReader(fileInputCsv), separator)) { beans = new ArrayList<>(); // read line by line String[] record; // skip header row String[] headers = reader.readNext(); // read content while ((record = reader.readNext()) != null) { T t = ReflectionUtilities.invokeConstructor(clazz); for (int i = 0; i < record.length; i++) { String nameMethod = "set" + org.apache.commons.lang3.StringUtils.capitalize(headers[i]); // invoke setter method if (ReflectionUtilities.checkMethod(clazz, nameMethod)) { ReflectionUtilities.invokeSetter(t, nameMethod, record[i]); } else { logger.warn( "Not exists the Method with name:" + nameMethod + " on the Bean:" + t.getClass().getName()); } } beans.add(t); } } return beans; } catch (IOException e) { logger.error( "Can't parse the CSV file:" + fileInputCsv.getAbsolutePath() + " -> " + e.getMessage(), e); return new ArrayList<>(); } } /** * Method use OpenCsv Library for * * @param fileInputCsv the File CSV to parse. * @param separator the char separator. * @return the List of Bean parsed from the CSV file. */ public static List<String[]> parseCSVFileAsList(File fileInputCsv, char separator) { try { List<String[]> records; // read all lines at once try ( // create CSVReader object CSVReader reader = new CSVReader(new FileReader(fileInputCsv), separator)) { // read all lines at once records = reader.readAll(); Iterator<String[]> iterator = records.iterator(); records.clear(); // skip header row iterator.next(); while (iterator.hasNext()) { String[] record = iterator.next(); records.add(record); } } return records; } catch (IOException e) { logger.error( "Can't parse the CSV file:" + fileInputCsv.getAbsolutePath() + " -> " + e.getMessage(), e); return new ArrayList<>(); } } /** * Method to get the content of a comma separated file (.csv,.input,.txt) * * @param CSV the File comma separated. * @param noHeaders if true jump the first line of the content. * @return the List of Array of the content of the File comma separated. 
*/ public static List<String[]> parseCSVFileAsList(File CSV, boolean noHeaders) { List<String[]> content; try { CSVReader reader1 = new CSVReader(new FileReader(CSV)); content = reader1.readAll(); reader1.close(); /* List<String[]> myDatas = reader1.readAll(); String[] lineI = myDatas.get(i); for (String[] line : myDatas) { for (String value : line) { //do stuff with value } }*/ if (noHeaders) content.remove(0); if (content.get(0).length <= 1) { logger.warn( "Attention: the CSV file may not have been parsed correctly with the OpenCSV API; try the Univocity method"); } return content; } catch (IOException e) { logger.error("Can't find the CSV File:" + e.getMessage(), e); return null; } } /** * Method to get the String array of the columns of a CSV File. * * @param fileCSV the File CSV. * @param hasFirstLine if true the first line of CSV File contains the columns name. * @return a String Array of the columns. */ public static String[] getHeaders(File fileCSV, boolean hasFirstLine) { String[] columns = new String[0]; try { CSVReader reader = new CSVReader(new FileReader(fileCSV)); columns = reader.readNext(); // assuming first read if (!hasFirstLine) { int columnCount = 0; if (columns != null) columnCount = columns.length; columns = new String[columnCount]; for (int i = 0; i < columnCount; i++) { columns[i] = "Column#" + i; } } } catch (IOException e) { logger.error("Can't find the Headers on the CSV File", e); } return columns; } /** * Method to get the String array of the columns of a CSV File. * * @param contentWithHeaders the {@link List} of {@link String} array of the content of the csv. * @return a String Array of the columns. */ public static String[] getHeaders(List<String[]> contentWithHeaders) { String[] columns = new String[0]; try { String[] headers = contentWithHeaders.get(0); if (headers.length <= 1) { throw new Exception("Can't find the delimiter with openCSV try with Univocity method."); } columns = headers; // the first record is the header row } catch (Exception e) { logger.error("Can't find the Headers on the content", e); } return columns; } /** * Method to get the String array of the columns of a CSV File. * * @param fileCSV the File CSV. * @param hasFirstLine if true the first line of CSV File contains the columns name. * @return a String Array of the columns. */ public static String[] getHeadersWithUnivocity(File fileCSV, boolean hasFirstLine) { return UnivocityUtilities.getHeaders(fileCSV, hasFirstLine); } // ------------------------------------------------------------------------------ /** * Method that uses the Univocity library to parse a CSV file. * * @param fileInputCsv the File CSV to parse. * @param noHeaders if true, the header row is excluded from the content. * @return the List of Array of String parsed from the CSV file.
*/ public static List<String[]> parseCSVFileAsListWithUnivocity( File fileInputCsv, boolean noHeaders) { return UnivocityUtilities.parseCSVFileAsList(fileInputCsv, noHeaders); } /* public static void main(String[] args) throws IOException { List<Employee> emps = parseCSVFileLineByLine(); System.out.println("**********"); parseCSVFileAsList(); System.out.println("**********"); parseCSVToBeanList(); System.out.println("**********"); writeCSVData(emps); }*/ public static String getFieldLatitude(String[] headers) { Pattern pattern = Pattern.compile("(L|l)(at)(itude)?", Pattern.CASE_INSENSITIVE); return getField(headers, pattern); } public static String getFieldLongitude(String[] headers) { Pattern pattern = Pattern.compile("(L|l)(on|ng)(gitude)?", Pattern.CASE_INSENSITIVE); return getField(headers, pattern); } public static String getField(String[] headers, Pattern pattern) { // String[] headers = CSVGetHeaders(headers,true); for (String s : headers) { if (pattern.matcher(String.valueOf(s)).matches()) { return s; } } return "NULL"; } /** * Parse CSV file using OpenCSV library and load in given database table. href: * http://viralpatel.net/blogs/java-load-csv-file-to-database/. Modified by rammar: * https://github.com/BaderLab/pharmacogenomics/blob/master/src/CSVLoader/CSVLoader.java * * @param connection the {@link Connection} SQL. * @param separator the {@link Character} separator. * @param csvFile Input CSV InputStream * @param tableName Database table name to import data * @param truncateBeforeLoad Truncate the table before inserting new records. * @throws SQLException if any error is occurred with the SQL Connection. * @throws java.io.IOException if any error is occurred with the file. */ public static void loadCSVToSQLTable( Connection connection, char separator, InputStream csvFile, String tableName, boolean truncateBeforeLoad) throws SQLException, IOException { String SQL_INSERT = "INSERT INTO ${table}(${keys}) VALUES(${values})"; String TABLE_REGEX = "\\$\\{table\\}"; String KEYS_REGEX = "\\$\\{keys\\}"; String VALUES_REGEX = "\\$\\{values\\}"; CSVReader csvReader; if (null == connection) { throw new SQLException("Not a valid connection."); } try { /* Modified by rammar. * * I was having issues with the CSVReader using the "\" to escape characters. * A MySQL CSV file contains quote-enclosed fields and non-quote-enclosed NULL * values written as "\N". The CSVReader was removing the "\". To detect "\N" * I must remove the escape character, and the only character you can replace * it with that you are pretty much guaranteed will not be used to escape * text is '\0'. * I read this on: * http://stackoverflow.com/questions/6008395/opencsv-in-java-ignores-backslash-in-a-field-value * based on: * http://sourceforge.net/p/opencsv/support-requests/5/ */ // PREVIOUS VERSION: csvReader = new CSVReader(new FileReader(csvFile), this.seprator); csvReader = new CSVReader(new InputStreamReader(csvFile), separator, '"', '\0'); } catch (Exception e) { throw new IOException("Error occured while executing file. " + e.getMessage()); } String[] headerRow = csvReader.readNext(); if (null == headerRow) { throw new FileNotFoundException( "No columns defined in given CSV file." 
+ "Please check the CSV file format."); } String questionmarks = StringUtils.repeat("?,", headerRow.length); questionmarks = (String) questionmarks.subSequence(0, questionmarks.length() - 1); /* NOTE from Ron: Header column names must match SQL table fields */ String query = SQL_INSERT.replaceFirst(TABLE_REGEX, tableName); query = query.replaceFirst(KEYS_REGEX, StringUtils.join(headerRow, ",")); query = query.replaceFirst(VALUES_REGEX, questionmarks); // System.out.println("Query: " + query); // Modified by rammar to suppress output String[] nextLine; Connection con = null; PreparedStatement ps = null; try { con = connection; con.setAutoCommit(false); ps = con.prepareStatement(query); if (truncateBeforeLoad) { // delete data from table before loading csv con.createStatement().execute("DELETE FROM " + tableName); } final int batchSize = 1000; int count = 0; Date date; while ((nextLine = csvReader.readNext()) != null) { int index = 1; for (String string : nextLine) { date = DateUtilities.convertToDate(string); if (null != date) { ps.setDate(index++, new java.sql.Date(date.getTime())); } else { /* Section modified by rammar to allow NULL values * to be input into the DB. */ if (string.length() > 0 && !string.equals("\\N")) { ps.setString(index++, string); } else { ps.setNull(index++, Types.VARCHAR); // ps.setString(index++, null); // can use this syntax also - not sure which is better } } } ps.addBatch(); if (++count % batchSize == 0) { ps.executeBatch(); } } ps.executeBatch(); // insert remaining records logger.info(count + " records loaded into " + tableName + " DB table"); con.commit(); } catch (SQLException | IOException e) { con.rollback(); throw new IOException( "Error occured while loading data from file to database." + e.getMessage()); } finally { if (null != ps) ps.close(); con.close(); csvReader.close(); } } }
public class ServletUtil { public static final String CONTENT_TEXT = "text/plain; charset=utf-8"; // bogus status returns for our logging public static final int STATUS_CLIENT_ABORT = 1000; public static final int STATUS_FORWARDED = 1001; public static final int STATUS_FORWARD_FAILURE = 1002; private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(ServletUtil.class); private static boolean isDebugInit = false; private static String contextPath = null; private static String rootPath = null; private static String contentPath = null; /** * @param context the Servlet context. * @deprecated Now handled in TdsContext.init(). */ public static void initContext(ServletContext context) { // setContextPath(context); if (contextPath == null) { // Servlet 2.5 allows the following. // contextPath = servletContext.getContextPath(); String tmpContextPath = context.getInitParameter("ContextPath"); // cannot be overridden in the ThreddsConfig file if (tmpContextPath == null) tmpContextPath = "thredds"; contextPath = "/" + tmpContextPath; } // setRootPath(context); if (rootPath == null) { rootPath = context.getRealPath("/"); rootPath = rootPath.replace('\\', '/'); } // setContentPath(); if (contentPath == null) { String tmpContentPath = "../../content" + getContextPath() + "/"; File cf = new File(getRootPath(), tmpContentPath); try { contentPath = cf.getCanonicalPath() + "/"; contentPath = contentPath.replace('\\', '/'); } catch (IOException e) { throw new RuntimeException(e.getMessage()); } } // initDebugging(context); initDebugging(context); } public static void setContextPath(String newContextPath) { contextPath = newContextPath; } public static void setRootPath(String newRootPath) { rootPath = newRootPath; } public static void setContentPath(String newContentPath) { contentPath = newContentPath; if (!contentPath.endsWith("/")) contentPath = contentPath + "/"; } public static void initDebugging(ServletContext webapp) { if (isDebugInit) return; isDebugInit = true; String debugOn = webapp.getInitParameter("DebugOn"); if (debugOn != null) { StringTokenizer toker = new StringTokenizer(debugOn); while (toker.hasMoreTokens()) Debug.set(toker.nextToken(), true); } } /** * Return the real path on the servers file system that corresponds to the root document ("/") on * the given servlet. * * @return the real path on the servers file system that corresponds to the root document ("/") on * the given servlet. */ public static String getRootPath() { return rootPath; } /** * Return the context path for the given servlet. Note - ToDo: Why not just use * ServletContext.getServletContextName()? * * @return the context path for the given servlet. */ public static String getContextPath() { return contextPath; } /** * Return the content path for the given servlet. * * @return the content path for the given servlet. */ public static String getContentPath() { return contentPath; } /** * Return the default/initial content path for the given servlet. The content of which is copied * to the content path when the web app is first installed. * * @return the default/initial content path for the given servlet. */ public static String getInitialContentPath() { return getRootPath() + "/WEB-INF/altContent/startup/"; } /** * Return the file path dealing with leading and trailing path seperators (which must be a slash * ("/")) for the given directory and file paths. * * <p>Note: Dealing with path strings is fragile. ToDo: Switch from using path strings to * java.io.Files. * * @param dirPath the directory path. 
* @param filePath the file path. * @return a full file path with the given directory and file paths. */ public static String formFilename(String dirPath, String filePath) { if ((dirPath == null) || (filePath == null)) return null; if (filePath.startsWith("/")) filePath = filePath.substring(1); return dirPath.endsWith("/") ? dirPath + filePath : dirPath + "/" + filePath; } /** * Handle a request for a raw/static file (i.e., not a catalog or dataset request). * * <p>Look in the content (user) directory then the root (distribution) directory for a file that * matches the given path and, if found, return it as the content of the HttpServletResponse. If * the file is forbidden (i.e., the path contains a "..", "WEB-INF", or "META-INF" directory), * send a HttpServletResponse.SC_FORBIDDEN response. If no file matches the request (including an * "index.html" file if the path ends in "/"), send an HttpServletResponse.SC_NOT_FOUND.. * * <p> * * <ol> * <li>Make sure the path does not contain ".." directories. * <li>Make sure the path does not contain "WEB-INF" or "META-INF". * <li>Check for requested file in the content directory (if the path is a directory, make sure * the path ends with "/" and check for an "index.html" file). * <li>Check for requested file in the root directory (if the path is a directory, make sure the * path ends with "/" and check for an "index.html" file). </ol * * @param path the requested path * @param servlet the servlet handling the request * @param req the HttpServletRequest * @param res the HttpServletResponse * @throws IOException if can't complete request due to IO problems. */ public static void handleRequestForRawFile( String path, HttpServlet servlet, HttpServletRequest req, HttpServletResponse res) throws IOException { // Don't allow ".." directories in path. if (path.indexOf("/../") != -1 || path.equals("..") || path.startsWith("../") || path.endsWith("/..")) { res.sendError(HttpServletResponse.SC_FORBIDDEN, "Path cannot contain \"..\" directory."); log.info(UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_FORBIDDEN, -1)); return; } // Don't allow access to WEB-INF or META-INF directories. String upper = path.toUpperCase(); if (upper.indexOf("WEB-INF") != -1 || upper.indexOf("META-INF") != -1) { res.sendError( HttpServletResponse.SC_FORBIDDEN, "Path cannot contain \"WEB-INF\" or \"META-INF\"."); log.info(UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_FORBIDDEN, -1)); return; } // Find a regular file File regFile = null; // Look in content directory for regular file. File cFile = new File(ServletUtil.formFilename(getContentPath(), path)); if (cFile.exists()) { if (cFile.isDirectory()) { if (!path.endsWith("/")) { String newPath = req.getRequestURL().append("/").toString(); ServletUtil.sendPermanentRedirect(newPath, req, res); } // If request path is a directory, check for index.html file. cFile = new File(cFile, "index.html"); if (cFile.exists() && !cFile.isDirectory()) regFile = cFile; } // If not a directory, use this file. else regFile = cFile; } if (regFile == null) { // Look in root directory. 
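// Only reached when the content (user) directory checked above had no match; the root (distribution) directory is the fallback.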
File rFile = new File(ServletUtil.formFilename(getRootPath(), path)); if (rFile.exists()) { if (rFile.isDirectory()) { if (!path.endsWith("/")) { String newPath = req.getRequestURL().append("/").toString(); ServletUtil.sendPermanentRedirect(newPath, req, res); } rFile = new File(rFile, "index.html"); if (rFile.exists() && !rFile.isDirectory()) regFile = rFile; } else regFile = rFile; } } if (regFile == null) { res.sendError(HttpServletResponse.SC_NOT_FOUND); // 404 log.info(UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, -1)); return; } ServletUtil.returnFile(servlet, req, res, regFile, null); } /** * Handle an explicit request for a content directory file (path must start with "/content/". * * <p>Note: As these requests will show the configuration files for the server, these requests * should be covered by security constraints. * * <p> * * <ol> * <li>Make sure the path does not contain ".." directories. * <li>Check for the requested file in the content directory. </ol * * @param path the requested path (must start with "/content/") * @param servlet the servlet handling the request * @param req the HttpServletRequest * @param res the HttpServletResponse * @throws IOException if can't complete request due to IO problems. */ public static void handleRequestForContentFile( String path, HttpServlet servlet, HttpServletRequest req, HttpServletResponse res) throws IOException { handleRequestForContentOrRootFile("/content/", path, servlet, req, res); } /** * Handle an explicit request for a root directory file (path must start with "/root/". * * <p>Note: As these requests will show the configuration files for the server, these requests * should be covered by security constraints. * * <p> * * <ol> * <li>Make sure the path does not contain ".." directories. * <li>Check for the requested file in the root directory. </ol * * @param path the requested path (must start with "/root/") * @param servlet the servlet handling the request * @param req the HttpServletRequest * @param res the HttpServletResponse * @throws IOException if can't complete request due to IO problems. */ public static void handleRequestForRootFile( String path, HttpServlet servlet, HttpServletRequest req, HttpServletResponse res) throws IOException { handleRequestForContentOrRootFile("/root/", path, servlet, req, res); } /** * Convenience routine used by handleRequestForContentFile() and handleRequestForRootFile(). * * @param pathPrefix * @param path * @param servlet * @param req request * @param res response * @throws IOException on IO error */ private static void handleRequestForContentOrRootFile( String pathPrefix, String path, HttpServlet servlet, HttpServletRequest req, HttpServletResponse res) throws IOException { if (!pathPrefix.equals("/content/") && !pathPrefix.equals("/root/")) { log.error( "handleRequestForContentFile(): The path prefix <" + pathPrefix + "> must be \"/content/\" or \"/root/\"."); throw new IllegalArgumentException("Path prefix must be \"/content/\" or \"/root/\"."); } if (!path.startsWith(pathPrefix)) { log.error( "handleRequestForContentFile(): path <" + path + "> must start with \"" + pathPrefix + "\"."); throw new IllegalArgumentException("Path must start with \"" + pathPrefix + "\"."); } // Don't allow ".." directories in path. 
if (path.indexOf("/../") != -1 || path.equals("..") || path.startsWith("../") || path.endsWith("/..")) { res.sendError(HttpServletResponse.SC_FORBIDDEN, "Path cannot contain \"..\" directory."); log.info(UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_FORBIDDEN, -1)); return; } // Find the requested file. File file = new File( ServletUtil.formFilename(getContentPath(), path.substring(pathPrefix.length() - 1))); if (file.exists()) { // Do not allow request for a directory. if (file.isDirectory()) { if (!path.endsWith("/")) { String redirectPath = req.getRequestURL().append("/").toString(); ServletUtil.sendPermanentRedirect(redirectPath, req, res); return; } int i = HtmlWriter.getInstance().writeDirectory(res, file, path); int status = i == 0 ? HttpServletResponse.SC_NOT_FOUND : HttpServletResponse.SC_OK; log.info(UsageLog.closingMessageForRequestContext(status, i)); return; } // Return the requested file. ServletUtil.returnFile(servlet, req, res, file, null); } else { // Requested file not found. log.info(UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, -1)); res.sendError(HttpServletResponse.SC_NOT_FOUND); // 404 } } /** * Send a permanent redirect (HTTP status 301 "Moved Permanently") response with the given target * path. * * <p>The given target path may be relative or absolute. If it is relative, it will be resolved * against the request URL. * * @param targetPath the path to which the client is redirected. * @param req the HttpServletRequest * @param res the HttpServletResponse * @throws IOException if can't write the response. */ public static void sendPermanentRedirect( String targetPath, HttpServletRequest req, HttpServletResponse res) throws IOException { // Absolute URL needed so resolve the target path against the request URL. URI uri; try { uri = new URI(req.getRequestURL().toString()); } catch (URISyntaxException e) { log.error( "sendPermanentRedirect(): Bad syntax on request URL <" + req.getRequestURL() + ">.", e); log.info( "sendPermanentRedirect(): " + UsageLog.closingMessageForRequestContext( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, 0)); if (!res.isCommitted()) res.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); return; } String absolutePath = uri.resolve(targetPath).toString(); absolutePath = res.encodeRedirectURL(absolutePath); res.setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY); res.addHeader("Location", absolutePath); String title = "Permanently Moved - 301"; String body = new StringBuilder() .append("<p>") .append("The requested URL <") .append(req.getRequestURL()) .append("> has been permanently moved (HTTP status code 301).") .append(" Instead, please use the following URL: <a href=\"") .append(absolutePath) .append("\">") .append(absolutePath) .append("</a>.") .append("</p>") .toString(); String htmlResp = new StringBuilder() .append(HtmlWriter.getInstance().getHtmlDoctypeAndOpenTag()) .append("<head><title>") .append(title) .append("</title></head><body>") .append("<h1>") .append(title) .append("</h1>") .append(body) .append("</body></html>") .toString(); log.info("sendPermanentRedirect(): redirect to " + absolutePath); log.info( "sendPermanentRedirect(): " + UsageLog.closingMessageForRequestContext( HttpServletResponse.SC_MOVED_PERMANENTLY, htmlResp.length())); // Write the catalog out. PrintWriter out = res.getWriter(); res.setContentType("text/html"); out.print(htmlResp); out.flush(); } /** * Write a file to the response stream. 
* * @param servlet the servlet handling the request * @param contentPath file root path * @param path file path relative to the root * @param req the request * @param res the response * @param contentType content type, or null * @throws IOException on write error */ public static void returnFile( HttpServlet servlet, String contentPath, String path, HttpServletRequest req, HttpServletResponse res, String contentType) throws IOException { String filename = ServletUtil.formFilename(contentPath, path); log.debug("returnFile(): returning file <" + filename + ">."); // No file, nothing to view if (filename == null) { log.info( "returnFile(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, 0)); res.sendError(HttpServletResponse.SC_NOT_FOUND); return; } // don't allow ".." in the path if (filename.indexOf("..") != -1) { log.info( "returnFile(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_FORBIDDEN, 0)); res.sendError(HttpServletResponse.SC_FORBIDDEN); return; } // don't allow access to WEB-INF or META-INF String upper = filename.toUpperCase(); if (upper.indexOf("WEB-INF") != -1 || upper.indexOf("META-INF") != -1) { log.info( "returnFile(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_FORBIDDEN, 0)); res.sendError(HttpServletResponse.SC_FORBIDDEN); return; } returnFile(servlet, req, res, new File(filename), contentType); } private static FileCacheRaf fileCacheRaf; public static void setFileCache(FileCacheRaf fileCache) { fileCacheRaf = fileCache; } public static FileCacheRaf getFileCache() { return fileCacheRaf; } /** * Write a file to the response stream. Handles Range requests. * * @param servlet the servlet handling the request * @param req the request * @param res the response * @param file the file to serve * @param contentType content type, if null, will try to guess * @throws IOException on write error */ public static void returnFile( HttpServlet servlet, HttpServletRequest req, HttpServletResponse res, File file, String contentType) throws IOException { // No file, nothing to view if (file == null) { log.info( "returnFile(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, 0)); res.sendError(HttpServletResponse.SC_NOT_FOUND); return; } // check that it exists if (!file.exists()) { log.info( "returnFile(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, 0)); res.sendError(HttpServletResponse.SC_NOT_FOUND); return; } // not a directory if (!file.isFile()) { log.info( "returnFile(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_BAD_REQUEST, 0)); res.sendError(HttpServletResponse.SC_BAD_REQUEST); return; } // Set the type of the file String filename = file.getPath(); if (null == contentType) { if (filename.endsWith(".html")) contentType = "text/html; charset=iso-8859-1"; else if (filename.endsWith(".xml")) contentType = "text/xml; charset=iso-8859-1"; else if (filename.endsWith(".txt") || (filename.endsWith(".log"))) contentType = CONTENT_TEXT; else if (filename.indexOf(".log.") > 0) contentType = CONTENT_TEXT; else if (filename.endsWith(".nc")) contentType = "application/x-netcdf"; else contentType = servlet.getServletContext().getMimeType(filename); if (contentType == null) contentType = "application/octet-stream"; } returnFile(req, res, file, contentType); } /** * Write a file to the response stream. Handles Range requests.
* * @param req request * @param res response * @param file must exists and not be a directory * @param contentType must not be null * @throws IOException or error */ public static void returnFile( HttpServletRequest req, HttpServletResponse res, File file, String contentType) throws IOException { res.setContentType(contentType); // see if its a Range Request boolean isRangeRequest = false; long startPos = 0, endPos = Long.MAX_VALUE; String rangeRequest = req.getHeader("Range"); if (rangeRequest != null) { // bytes=12-34 or bytes=12- int pos = rangeRequest.indexOf("="); if (pos > 0) { int pos2 = rangeRequest.indexOf("-"); if (pos2 > 0) { String startString = rangeRequest.substring(pos + 1, pos2); String endString = rangeRequest.substring(pos2 + 1); startPos = Long.parseLong(startString); if (endString.length() > 0) endPos = Long.parseLong(endString) + 1; isRangeRequest = true; } } } // set content length long fileSize = file.length(); long contentLength = fileSize; if (isRangeRequest) { endPos = Math.min(endPos, fileSize); contentLength = endPos - startPos; } if (contentLength > Integer.MAX_VALUE) res.addHeader( "Content-Length", Long.toString(contentLength)); // allow content length > MAX_INT else res.setContentLength((int) contentLength); // note HEAD only allows this String filename = file.getPath(); boolean debugRequest = Debug.isSet("returnFile"); if (debugRequest) log.debug( "returnFile(): filename = " + filename + " contentType = " + contentType + " contentLength = " + contentLength); // indicate we allow Range Requests res.addHeader("Accept-Ranges", "bytes"); if (req.getMethod().equals("HEAD")) { log.info( "returnFile(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_OK, 0)); return; } try { if (isRangeRequest) { // set before content is sent res.addHeader("Content-Range", "bytes " + startPos + "-" + (endPos - 1) + "/" + fileSize); res.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT); FileCacheRaf.Raf craf = null; try { craf = fileCacheRaf.acquire(filename); IO.copyRafB( craf.getRaf(), startPos, contentLength, res.getOutputStream(), new byte[60000]); log.info( "returnFile(): " + UsageLog.closingMessageForRequestContext( HttpServletResponse.SC_PARTIAL_CONTENT, contentLength)); return; } finally { if (craf != null) fileCacheRaf.release(craf); } } // Return the file ServletOutputStream out = res.getOutputStream(); IO.copyFileB(file, out, 60000); res.flushBuffer(); out.close(); if (debugRequest) log.debug("returnFile(): returnFile ok = " + filename); log.info( "returnFile(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_OK, contentLength)); } // @todo Split up this exception handling: those from file access vs those from dealing with // response // File access: catch and res.sendError() // response: don't catch (let bubble up out of doGet() etc) catch (FileNotFoundException e) { log.error("returnFile(): FileNotFoundException= " + filename); log.info( "returnFile(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, 0)); if (!res.isCommitted()) res.sendError(HttpServletResponse.SC_NOT_FOUND); } catch (java.net.SocketException e) { log.info("returnFile(): SocketException sending file: " + filename + " " + e.getMessage()); log.info("returnFile(): " + UsageLog.closingMessageForRequestContext(STATUS_CLIENT_ABORT, 0)); } catch (IOException e) { String eName = e.getClass().getName(); // dont want compile time dependency on ClientAbortException if (eName.equals("org.apache.catalina.connector.ClientAbortException")) { 
log.info( "returnFile(): ClientAbortException while sending file: " + filename + " " + e.getMessage()); log.info( "returnFile(): " + UsageLog.closingMessageForRequestContext(STATUS_CLIENT_ABORT, 0)); return; } log.error("returnFile(): IOException (" + e.getClass().getName() + ") sending file ", e); log.error( "returnFile(): " + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, 0)); if (!res.isCommitted()) res.sendError(HttpServletResponse.SC_NOT_FOUND, "Problem sending file: " + e.getMessage()); } } /** * Send given content string as the HTTP response. * * @param contents the string to return as the HTTP response. * @param res the HttpServletResponse * @throws IOException if an I/O error occurs while writing the response. */ public static void returnString(String contents, HttpServletResponse res) throws IOException { try { ServletOutputStream out = res.getOutputStream(); IO.copy(new ByteArrayInputStream(contents.getBytes()), out); log.info( UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_OK, contents.length())); } catch (IOException e) { log.error(" IOException sending string: ", e); log.info(UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, 0)); res.sendError(HttpServletResponse.SC_NOT_FOUND, "Problem sending string: " + e.getMessage()); } } /** * Return the request URL relative to the server (i.e., starting with the context path). * * @param req request * @return URL relative to the server */ public static String getReletiveURL(HttpServletRequest req) { return req.getContextPath() + req.getServletPath() + req.getPathInfo(); } /** * Forward this request to the CatalogServices servlet ("/catalog.html"). * * @param req request * @param res response * @throws IOException on IO error * @throws ServletException other error */ public static void forwardToCatalogServices(HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException { String reqs = "catalog=" + getReletiveURL(req); String query = req.getQueryString(); if (query != null) reqs = reqs + "&" + query; log.info("forwardToCatalogServices(): request string = \"/catalog.html?" + reqs + "\""); // dispatch to CatalogHtml servlet RequestForwardUtils.forwardRequestRelativeToCurrentContext("/catalog.html?" + reqs, req, res); } public static boolean saveFile( HttpServlet servlet, String contentPath, String path, HttpServletRequest req, HttpServletResponse res) { // @todo Need to use logServerAccess() below here. 
boolean debugRequest = Debug.isSet("SaveFile"); if (debugRequest) log.debug(" saveFile(): path= " + path); String filename = contentPath + path; // absolute path File want = new File(filename); // backup current version if it exists int version = getBackupVersion(want.getParent(), want.getName()); String fileSave = filename + "~" + version; File file = new File(filename); if (file.exists()) { try { IO.copyFile(filename, fileSave); } catch (IOException e) { log.error( "saveFile(): Unable to save copy of file " + filename + " to " + fileSave + "\n" + e.getMessage()); return false; } } // save new file try { OutputStream out = new BufferedOutputStream(new FileOutputStream(filename)); IO.copy(req.getInputStream(), out); out.close(); if (debugRequest) log.debug("saveFile(): ok= " + filename); res.setStatus(HttpServletResponse.SC_CREATED); log.info(UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_CREATED, -1)); return true; } catch (IOException e) { log.error( "saveFile(): Unable to PUT file " + filename + " to " + fileSave + "\n" + e.getMessage()); return false; } } private static int getBackupVersion(String dirName, String fileName) { int maxN = 0; File dir = new File(dirName); if (!dir.exists()) return -1; String[] files = dir.list(); if (null == files) return -1; for (String name : files) { if (name.indexOf(fileName) < 0) continue; int pos = name.indexOf('~'); if (pos < 0) continue; String ver = name.substring(pos + 1); int n = 0; try { n = Integer.parseInt(ver); } catch (NumberFormatException e) { log.error("Format Integer error on backup filename= " + ver); } maxN = Math.max(n, maxN); } return maxN + 1; } public static boolean copyDir(String fromDir, String toDir) throws IOException { File contentFile = new File(toDir + ".INIT"); if (!contentFile.exists()) { IO.copyDirTree(fromDir, toDir); contentFile.createNewFile(); return true; } return false; } /** * ************************************************************************ Sends an error to the * client. * * @param t The exception that caused the problem. * @param res The <code>HttpServletResponse</code> for the client. */ public static void handleException(Throwable t, HttpServletResponse res) { try { String message = t.getMessage(); if (message == null) message = "NULL message " + t.getClass().getName(); if (Debug.isSet("trustedMode")) { // security issue: only show stack if trusted ByteArrayOutputStream bs = new ByteArrayOutputStream(); PrintStream ps = new PrintStream(bs); t.printStackTrace(ps); message = new String(bs.toByteArray()); } log.info( UsageLog.closingMessageForRequestContext( HttpServletResponse.SC_BAD_REQUEST, message.length())); log.error("handleException", t); t.printStackTrace(); // debugging - log.error not showing stack trace !! 
if (!res.isCommitted()) res.sendError(HttpServletResponse.SC_BAD_REQUEST, message); } catch (Throwable e) { log.error("handleException() had problem reporting Exception", e); t.printStackTrace(); } } public static void showServerInfo(PrintStream out) { out.println("Server Info"); out.println( " getDocumentBuilderFactoryVersion(): " + XMLEntityResolver.getDocumentBuilderFactoryVersion()); out.println(); Properties sysp = System.getProperties(); Enumeration e = sysp.propertyNames(); List<String> list = Collections.list(e); Collections.sort(list); out.println("System Properties:"); for (String name : list) { String value = System.getProperty(name); out.println(" " + name + " = " + value); } out.println(); } public static void showServletInfo(HttpServlet servlet, PrintStream out) { out.println("Servlet Info"); out.println(" getServletName(): " + servlet.getServletName()); out.println(" getRootPath(): " + getRootPath()); out.println(" Init Parameters:"); Enumeration params = servlet.getInitParameterNames(); while (params.hasMoreElements()) { String name = (String) params.nextElement(); out.println(" " + name + ": " + servlet.getInitParameter(name)); } out.println(); ServletContext context = servlet.getServletContext(); out.println("Context Info"); try { out.println(" context.getResource('/'): " + context.getResource("/")); } catch (java.net.MalformedURLException e) { } // cant happen out.println(" context.getServerInfo(): " + context.getServerInfo()); out.println(" name: " + getServerInfoName(context.getServerInfo())); out.println(" version: " + getServerInfoVersion(context.getServerInfo())); out.println(" context.getInitParameterNames():"); params = context.getInitParameterNames(); while (params.hasMoreElements()) { String name = (String) params.nextElement(); out.println(" " + name + ": " + context.getInitParameter(name)); } out.println(" context.getAttributeNames():"); params = context.getAttributeNames(); while (params.hasMoreElements()) { String name = (String) params.nextElement(); out.println(" context.getAttribute(\"" + name + "\"): " + context.getAttribute(name)); } out.println(); } /** * Show the pieces of the request, for debugging * * @param req the HttpServletRequest * @return parsed request */ public static String getRequestParsed(HttpServletRequest req) { return req.getRequestURI() + " = " + req.getContextPath() + "(context), " + req.getServletPath() + "(servletPath), " + req.getPathInfo() + "(pathInfo), " + req.getQueryString() + "(query)"; } /** * This is the server part, eg http://motherlode:8080 * * @param req the HttpServletRequest * @return request server */ public static String getRequestServer(HttpServletRequest req) { return req.getScheme() + "://" + req.getServerName() + ":" + req.getServerPort(); } /** * This is everything except the query string * * @param req the HttpServletRequest * @return parsed request base */ public static String getRequestBase(HttpServletRequest req) { // return "http://"+req.getServerName()+":"+ req.getServerPort()+req.getRequestURI(); return req.getRequestURL().toString(); } /** * The request base as a URI * * @param req the HttpServletRequest * @return parsed request as a URI */ public static URI getRequestURI(HttpServletRequest req) { try { return new URI(getRequestBase(req)); } catch (URISyntaxException e) { e.printStackTrace(); return null; } } /** * servletPath + pathInfo * * @param req the HttpServletRequest * @return parsed request servletPath + pathInfo */ public static String getRequestPath(HttpServletRequest req) { StringBuffer buff 
= new StringBuffer(); if (req.getServletPath() != null) buff.append(req.getServletPath()); if (req.getPathInfo() != null) buff.append(req.getPathInfo()); return buff.toString(); } /** * The entire request including query string * * @param req the HttpServletRequest * @return entire parsed request */ public static String getRequest(HttpServletRequest req) { String query = req.getQueryString(); return getRequestBase(req) + (query == null ? "" : "?" + query); } /** * Return the value of the given parameter (ignoring case) for the given request. Should only be * used if the parameter is known to only have one value. If used on a multi-valued parameter, the * first value is returned. * * @param req the HttpServletRequest * @param paramName the name of the parameter to find. * @return the value of the given parameter for the given request. */ public static String getParameterIgnoreCase(HttpServletRequest req, String paramName) { Enumeration e = req.getParameterNames(); while (e.hasMoreElements()) { String s = (String) e.nextElement(); if (s.equalsIgnoreCase(paramName)) return req.getParameter(s); } return null; } /** * Return the values of the given parameter (ignoring case) for the given request. * * @param req the HttpServletRequest * @param paramName the name of the parameter to find. * @return the values of the given parameter for the given request. */ public static String[] getParameterValuesIgnoreCase(HttpServletRequest req, String paramName) { Enumeration e = req.getParameterNames(); while (e.hasMoreElements()) { String s = (String) e.nextElement(); if (s.equalsIgnoreCase(paramName)) return req.getParameterValues(s); } return null; } public static String getFileURL(String filename) { filename = filename.replace('\\', '/'); filename = StringUtil.replace(filename, ' ', "+"); return "file:" + filename; } /** * Show details about the request * * @param servlet used to get the servlet context, may be null * @param req the request * @return string showing the details of the request.
*/ public static String showRequestDetail(HttpServlet servlet, HttpServletRequest req) { StringBuilder sbuff = new StringBuilder(); sbuff.append("Request Info\n"); sbuff.append(" req.getServerName(): ").append(req.getServerName()).append("\n"); sbuff.append(" req.getServerPort(): ").append(req.getServerPort()).append("\n"); sbuff.append(" req.getContextPath:").append(req.getContextPath()).append("\n"); sbuff.append(" req.getServletPath:").append(req.getServletPath()).append("\n"); sbuff.append(" req.getPathInfo:").append(req.getPathInfo()).append("\n"); sbuff.append(" req.getQueryString:").append(req.getQueryString()).append("\n"); sbuff .append(" getQueryStringDecoded:") .append(EscapeStrings.urlDecode(req.getQueryString())) .append("\n"); /*try { sbuff.append(" getQueryStringDecoded:").append(URLDecoder.decode(req.getQueryString(), "UTF-8")).append("\n"); } catch (UnsupportedEncodingException e1) { e1.printStackTrace(); }*/ sbuff.append(" req.getRequestURI:").append(req.getRequestURI()).append("\n"); sbuff.append(" getRequestBase:").append(getRequestBase(req)).append("\n"); sbuff.append(" getRequestServer:").append(getRequestServer(req)).append("\n"); sbuff.append(" getRequest:").append(getRequest(req)).append("\n"); sbuff.append("\n"); sbuff.append(" req.getPathTranslated:").append(req.getPathTranslated()).append("\n"); String path = req.getPathTranslated(); if ((path != null) && (servlet != null)) { ServletContext context = servlet.getServletContext(); sbuff.append(" getMimeType:").append(context.getMimeType(path)).append("\n"); } sbuff.append("\n"); sbuff.append(" req.getScheme:").append(req.getScheme()).append("\n"); sbuff.append(" req.getProtocol:").append(req.getProtocol()).append("\n"); sbuff.append(" req.getMethod:").append(req.getMethod()).append("\n"); sbuff.append("\n"); sbuff.append(" req.getContentType:").append(req.getContentType()).append("\n"); sbuff.append(" req.getContentLength:").append(req.getContentLength()).append("\n"); sbuff.append(" req.getRemoteAddr():").append(req.getRemoteAddr()); try { sbuff .append(" getRemoteHost():") .append(java.net.InetAddress.getByName(req.getRemoteHost()).getHostName()) .append("\n"); } catch (java.net.UnknownHostException e) { sbuff.append(" getRemoteHost():").append(e.getMessage()).append("\n"); } sbuff.append(" getRemoteUser():").append(req.getRemoteUser()).append("\n"); sbuff.append("\n"); sbuff.append("Request Parameters:\n"); Enumeration params = req.getParameterNames(); while (params.hasMoreElements()) { String name = (String) params.nextElement(); String values[] = req.getParameterValues(name); if (values != null) { for (int i = 0; i < values.length; i++) { sbuff .append(" ") .append(name) .append(" (") .append(i) .append("): ") .append(values[i]) .append("\n"); } } } sbuff.append("\n"); sbuff.append("Request Headers:\n"); Enumeration names = req.getHeaderNames(); while (names.hasMoreElements()) { String name = (String) names.nextElement(); Enumeration values = req.getHeaders(name); // support multiple values if (values != null) { while (values.hasMoreElements()) { String value = (String) values.nextElement(); sbuff.append(" ").append(name).append(": ").append(value).append("\n"); } } } sbuff.append(" ------------------\n"); return sbuff.toString(); } public static String showRequestHeaders(HttpServletRequest req) { StringBuilder sbuff = new StringBuilder(); sbuff.append("Request Headers:\n"); Enumeration names = req.getHeaderNames(); while (names.hasMoreElements()) { String name = (String) names.nextElement(); Enumeration 
values = req.getHeaders(name); // support multiple values if (values != null) { while (values.hasMoreElements()) { String value = (String) values.nextElement(); sbuff.append(" ").append(name).append(": ").append(value).append("\n"); } } } return sbuff.toString(); } public static void showSession(HttpServletRequest req, HttpServletResponse res, PrintStream out) { // res.setContentType("text/html"); // Get the current session object, create one if necessary HttpSession session = req.getSession(); // Increment the hit count for this page. The value is saved // in this client's session under the name "snoop.count". Integer count = (Integer) session.getAttribute("snoop.count"); if (count == null) { count = 1; } else count = count + 1; session.setAttribute("snoop.count", count); out.println(HtmlWriter.getInstance().getHtmlDoctypeAndOpenTag()); out.println("<HEAD><TITLE>SessionSnoop</TITLE></HEAD>"); out.println("<BODY><H1>Session Snoop</H1>"); // Display the hit count for this page out.println( "You've visited this page " + count + ((!(count.intValue() != 1)) ? " time." : " times.")); out.println("<P>"); out.println("<H3>Here is your saved session data:</H3>"); Enumeration atts = session.getAttributeNames(); while (atts.hasMoreElements()) { String name = (String) atts.nextElement(); out.println(name + ": " + session.getAttribute(name) + "<BR>"); } out.println("<H3>Here are some vital stats on your session:</H3>"); out.println("Session id: " + session.getId() + " <I>(keep it secret)</I><BR>"); out.println("New session: " + session.isNew() + "<BR>"); out.println("Timeout: " + session.getMaxInactiveInterval()); out.println("<I>(" + session.getMaxInactiveInterval() / 60 + " minutes)</I><BR>"); out.println("Creation time: " + session.getCreationTime()); out.println("<I>(" + new Date(session.getCreationTime()) + ")</I><BR>"); out.println("Last access time: " + session.getLastAccessedTime()); out.println("<I>(" + new Date(session.getLastAccessedTime()) + ")</I><BR>"); out.println( "Requested session ID from cookie: " + req.isRequestedSessionIdFromCookie() + "<BR>"); out.println("Requested session ID from URL: " + req.isRequestedSessionIdFromURL() + "<BR>"); out.println("Requested session ID valid: " + req.isRequestedSessionIdValid() + "<BR>"); out.println("<H3>Test URL Rewriting</H3>"); out.println("Click <A HREF=\"" + res.encodeURL(req.getRequestURI()) + "\">here</A>"); out.println("to test that session tracking works via URL"); out.println("rewriting even when cookies aren't supported."); out.println("</BODY></HTML>"); } public static void showSession(HttpServletRequest req, PrintStream out) { // res.setContentType("text/html"); // Get the current session object, create one if necessary HttpSession session = req.getSession(); out.println("Session id: " + session.getId()); out.println(" session.isNew(): " + session.isNew()); out.println(" session.getMaxInactiveInterval(): " + session.getMaxInactiveInterval() + " secs"); out.println( " session.getCreationTime(): " + session.getCreationTime() + " (" + new Date(session.getCreationTime()) + ")"); out.println( " session.getLastAccessedTime(): " + session.getLastAccessedTime() + " (" + new Date(session.getLastAccessedTime()) + ")"); out.println(" req.isRequestedSessionIdFromCookie: " + req.isRequestedSessionIdFromCookie()); out.println(" req.isRequestedSessionIdFromURL: " + req.isRequestedSessionIdFromURL()); out.println(" req.isRequestedSessionIdValid: " + req.isRequestedSessionIdValid()); out.println("Saved session Attributes:"); Enumeration atts = 
session.getAttributeNames(); while (atts.hasMoreElements()) { String name = (String) atts.nextElement(); out.println(" " + name + ": " + session.getAttribute(name) + "<BR>"); } } public static String showSecurity(HttpServletRequest req, String role) { StringBuilder sbuff = new StringBuilder(); sbuff.append("Security Info\n"); sbuff.append(" req.getRemoteUser(): ").append(req.getRemoteUser()).append("\n"); sbuff.append(" req.getUserPrincipal(): ").append(req.getUserPrincipal()).append("\n"); sbuff .append(" req.isUserInRole(") .append(role) .append("):") .append(req.isUserInRole(role)) .append("\n"); sbuff.append(" ------------------\n"); return sbuff.toString(); } /* from luca / ageci code, portResolver, portMapper not known static public void getSecureRedirect(HttpServletRequest req) { String pathInfo = req.getPathInfo(); String queryString = req.getQueryString(); String contextPath = req.getContextPath(); String destination = req.getServletPath() + ((pathInfo == null) ? "" : pathInfo) + ((queryString == null) ? "" : ("?" + queryString)); String redirectUrl = contextPath; Integer httpPort = new Integer(portResolver.getServerPort(req)); Integer httpsPort = portMapper.lookupHttpsPort(httpPort); if (httpsPort != null) { boolean includePort = true; if (httpsPort.intValue() == 443) { includePort = false; } redirectUrl = "https://" + req.getServerName() + ((includePort) ? (":" + httpsPort) : "") + contextPath + destination; } } */ private static String getServerInfoName(String serverInfo) { int slash = serverInfo.indexOf('/'); if (slash == -1) return serverInfo; else return serverInfo.substring(0, slash); } private static String getServerInfoVersion(String serverInfo) { // Version info is everything between the slash and the space int slash = serverInfo.indexOf('/'); if (slash == -1) return null; int space = serverInfo.indexOf(' ', slash); if (space == -1) space = serverInfo.length(); return serverInfo.substring(slash + 1, space); } public static void showThreads(PrintStream pw) { Thread current = Thread.currentThread(); ThreadGroup group = current.getThreadGroup(); while (true) { if (group.getParent() == null) break; group = group.getParent(); } showThreads(pw, group, current); } private static void showThreads(PrintStream pw, ThreadGroup g, Thread current) { int nthreads = g.activeCount(); pw.println("\nThread Group = " + g.getName() + " activeCount= " + nthreads); Thread[] tarray = new Thread[nthreads]; int n = g.enumerate(tarray, false); for (int i = 0; i < n; i++) { Thread thread = tarray[i]; ClassLoader loader = thread.getContextClassLoader(); String loaderName = (loader == null) ? "Default" : loader.getClass().getName(); Thread.State state = thread.getState(); long id = thread.getId(); pw.print(" " + id + " " + thread.getName() + " " + state + " " + loaderName); if (thread == current) pw.println(" **** CURRENT ***"); else pw.println(); } int ngroups = g.activeGroupCount(); ThreadGroup[] garray = new ThreadGroup[ngroups]; int ng = g.enumerate(garray, false); for (int i = 0; i < ng; i++) { ThreadGroup nested = garray[i]; showThreads(pw, nested, current); } } }
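// ---------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source). It shows how a servlet's doGet()
// might delegate static-file requests to ServletUtil.handleRequestForRawFile(), whose signature
// and security checks are defined above. The class name StaticFileServlet is hypothetical, and
// the package/imports are assumed to match the surrounding code.
public class StaticFileServlet extends javax.servlet.http.HttpServlet {

  protected void doGet(javax.servlet.http.HttpServletRequest req,
                       javax.servlet.http.HttpServletResponse res)
      throws javax.servlet.ServletException, java.io.IOException {
    // Use the path below the servlet mapping; default to "/" so a directory index can be served.
    String path = (req.getPathInfo() == null) ? "/" : req.getPathInfo();

    // handleRequestForRawFile() rejects "..", "WEB-INF" and "META-INF", looks in the content
    // directory and then the root directory, and writes the file (or an error status) to res.
    ServletUtil.handleRequestForRawFile(path, this, req, res);
  }
}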
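// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): how a "Range: bytes=START-END" header is
// reduced to a start offset and an exclusive end offset, mirroring the parsing in
// ServletUtil.returnFile() above. Like the original, it handles only a single "bytes=a-b" or
// "bytes=a-" range; suffix ranges ("bytes=-N") and multi-range requests are not supported.
final class RangeHeaderExample {

  /** Returns {startPos, endPosExclusive}, with the end clipped to the file size. */
  static long[] parseRange(String rangeHeader, long fileSize) {
    long startPos = 0, endPos = Long.MAX_VALUE;
    int eq = rangeHeader.indexOf('=');
    if (eq > 0) {
      int dash = rangeHeader.indexOf('-');
      if (dash > 0) {
        startPos = Long.parseLong(rangeHeader.substring(eq + 1, dash));
        String endString = rangeHeader.substring(dash + 1);
        if (endString.length() > 0) endPos = Long.parseLong(endString) + 1; // header end is inclusive
      }
    }
    return new long[] {startPos, Math.min(endPos, fileSize)};
  }

  public static void main(String[] args) {
    long[] r = parseRange("bytes=12-34", 100);
    // startPos=12, endPos=35, so contentLength = 35 - 12 = 23 bytes
    System.out.println(r[0] + " " + r[1]);
  }
}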
/** * Build a GribCollection object. Rectilyse and manage grib collection index. Covers * GribCollectionProto. * * @author caron * @since 4/6/11 */ public class Grib2CollectionBuilder { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(GribCollection.class); public static final String MAGIC_START = "Grib2CollectionIndex"; protected static final int version = 5; private static final boolean debug = false; // from a single file, read in the index, create if it doesnt exist public static GribCollection createFromSingleFile( File file, CollectionManager.Force force, Formatter f) throws IOException { Grib2CollectionBuilder builder = new Grib2CollectionBuilder(file, f); builder.readOrCreateIndex(force, f); return builder.gc; } // from a collection, read in the index, create if it doesnt exist or is out of date // assume that the CollectionManager is up to date, eg doesnt need to be scanned public static GribCollection factory( CollectionManager dcm, CollectionManager.Force force, Formatter f) throws IOException { Grib2CollectionBuilder builder = new Grib2CollectionBuilder(dcm); builder.readOrCreateIndex(force, f); return builder.gc; } // read in the index, index raf already open public static GribCollection createFromIndex(String name, File directory, RandomAccessFile raf) throws IOException { Grib2CollectionBuilder builder = new Grib2CollectionBuilder(name, directory); if (builder.readIndex(raf)) return builder.gc; throw new IOException("Reading index failed"); } // this writes the index always public static boolean writeIndexFile(File indexFile, CollectionManager dcm, Formatter f) throws IOException { Grib2CollectionBuilder builder = new Grib2CollectionBuilder(dcm); return builder.createIndex(indexFile, CollectionManager.Force.always, f); } //////////////////////////////////////////////////////////////// private final List<CollectionManager> collections = new ArrayList<CollectionManager>(); protected GribCollection gc; // single file private Grib2CollectionBuilder(File file, Formatter f) throws IOException { try { // String spec = StringUtil2.substitute(file.getPath(), "\\", "/"); CollectionManager dcm = new DatasetCollectionSingleFile(file); this.collections.add(dcm); this.gc = new Grib2Collection(file.getName(), new File(dcm.getRoot())); } catch (Exception e) { ByteArrayOutputStream bos = new ByteArrayOutputStream(10000); e.printStackTrace(new PrintStream(bos)); f.format("%s", bos.toString()); throw new IOException(e); } } private Grib2CollectionBuilder(CollectionManager dcm) { this.collections.add(dcm); this.gc = new Grib2Collection(dcm.getCollectionName(), new File(dcm.getRoot())); } private Grib2CollectionBuilder(String name, File directory) { this.gc = new Grib2Collection(name, directory); } protected Grib2CollectionBuilder() { this.gc = null; } protected int getVersion() { return version; } // read or create index private void readOrCreateIndex(CollectionManager.Force ff, Formatter f) throws IOException { // force new index or test for new index needed boolean force = ((ff == CollectionManager.Force.always) || (ff == CollectionManager.Force.test && needsUpdate())); // otherwise, we're good as long as the index file exists File idx = gc.getIndexFile(); if (force || !idx.exists() || !readIndex(idx.getPath())) { logger.info("GribCollection {}: createIndex {}", gc.getName(), idx.getPath()); createIndex(idx, ff, f); // write out index gc.rafLocation = idx.getPath(); gc.setRaf(new RandomAccessFile(idx.getPath(), "r")); readIndex(gc.getRaf()); // read back in 
index } } public boolean needsUpdate() { File idx = gc.getIndexFile(); return !idx.exists() || needsUpdate(idx.lastModified()); } private boolean needsUpdate(long idxLastModified) { CollectionManager.ChangeChecker cc = Grib2Index.getChangeChecker(); for (CollectionManager dcm : collections) { for (MFile mfile : dcm.getFiles()) { if (cc.hasChangedSince(mfile, idxLastModified)) return true; } } return false; } //////////////////////////////////////////////////////////////////////////////////////////////////// // reading public boolean readIndex(String filename) throws IOException { return readIndex(new RandomAccessFile(filename, "r")); } public boolean readIndex(RandomAccessFile raf) { gc.setRaf(raf); // LOOK leaving the raf open in the GribCollection try { raf.order(RandomAccessFile.BIG_ENDIAN); raf.seek(0); //// header message if (!NcStream.readAndTest(raf, MAGIC_START.getBytes())) { logger.error("GribCollection {}: invalid index", gc.getName()); return false; } int v = raf.readInt(); if (v != getVersion()) { logger.warn( "GribCollection {}: index found version={}, want version= {} on file {}", new Object[] {gc.getName(), v, version, raf.getLocation()}); return false; } long skip = raf.readLong(); raf.skipBytes(skip); int size = NcStream.readVInt(raf); if ((size < 0) || (size > 100 * 1000 * 1000)) { logger.warn("GribCollection {}: invalid index ", gc.getName()); return false; } byte[] m = new byte[size]; raf.readFully(m); GribCollectionProto.GribCollectionIndex proto = GribCollectionProto.GribCollectionIndex.parseFrom(m); gc.center = proto.getCenter(); gc.subcenter = proto.getSubcenter(); gc.master = proto.getMaster(); gc.local = proto.getLocal(); gc.genProcessType = proto.getGenProcessType(); gc.genProcessId = proto.getGenProcessId(); gc.backProcessId = proto.getBackProcessId(); gc.local = proto.getLocal(); // gc.tables = Grib2Tables.factory(gc.center, gc.subcenter, gc.master, gc.local); gc.filenames = new ArrayList<String>(proto.getFilesCount()); for (int i = 0; i < proto.getFilesCount(); i++) gc.filenames.add(proto.getFiles(i)); // error condition on a GribCollection Index if ((proto.getFilesCount() == 0) && !(this instanceof TimePartitionBuilder)) { logger.warn("GribCollection {}: has no files, force recreate ", gc.getName()); return false; } gc.groups = new ArrayList<GribCollection.GroupHcs>(proto.getGroupsCount()); for (int i = 0; i < proto.getGroupsCount(); i++) gc.groups.add(readGroup(proto.getGroups(i), gc.makeGroup())); Collections.sort(gc.groups); gc.params = new ArrayList<Parameter>(proto.getParamsCount()); for (int i = 0; i < proto.getParamsCount(); i++) gc.params.add(readParam(proto.getParams(i))); if (!readPartitions(proto)) { logger.warn("TimePartition {}: has no partitions, force recreate ", gc.getName()); return false; } return true; } catch (Throwable t) { logger.error("Error reading index " + raf.getLocation(), t); return false; } } protected boolean readPartitions(GribCollectionProto.GribCollectionIndex proto) { return true; } protected void readTimePartitions( GribCollection.GroupHcs group, GribCollectionProto.Group proto) { // NOOP } GribCollection.GroupHcs readGroup(GribCollectionProto.Group p, GribCollection.GroupHcs group) throws IOException { Grib2SectionGridDefinition gdss = new Grib2SectionGridDefinition(p.getGds().toByteArray()); Grib2Gds gds = gdss.getGDS(); group.setHorizCoordSystem(gds.makeHorizCoordSys()); group.varIndex = new ArrayList<GribCollection.VariableIndex>(); for (int i = 0; i < p.getVariablesCount(); i++) 
group.varIndex.add(readVariable(p.getVariables(i), group)); Collections.sort(group.varIndex); group.timeCoords = new ArrayList<TimeCoord>(p.getTimeCoordsCount()); for (int i = 0; i < p.getTimeCoordsCount(); i++) group.timeCoords.add(readTimeCoord(p.getTimeCoords(i))); group.vertCoords = new ArrayList<VertCoord>(p.getVertCoordsCount()); for (int i = 0; i < p.getVertCoordsCount(); i++) group.vertCoords.add(readVertCoord(p.getVertCoords(i))); group.ensCoords = new ArrayList<EnsCoord>(p.getEnsCoordsCount()); for (int i = 0; i < p.getEnsCoordsCount(); i++) group.ensCoords.add(readEnsCoord(p.getEnsCoords(i))); group.filenose = new int[p.getFilenoCount()]; for (int i = 0; i < p.getFilenoCount(); i++) group.filenose[i] = p.getFileno(i); readTimePartitions(group, p); // finish for (GribCollection.VariableIndex vi : group.varIndex) { TimeCoord tc = group.timeCoords.get(vi.timeIdx); vi.ntimes = tc.getSize(); VertCoord vc = (vi.vertIdx < 0) ? null : group.vertCoords.get(vi.vertIdx); vi.nverts = (vc == null) ? 0 : vc.getSize(); EnsCoord ec = (vi.ensIdx < 0) ? null : group.ensCoords.get(vi.ensIdx); vi.nens = (ec == null) ? 0 : ec.getSize(); } // group.assignVertNames(); return group; } private Parameter readParam(GribCollectionProto.Parameter pp) throws IOException { if (pp.hasSdata()) return new Parameter(pp.getName(), pp.getSdata()); int count = 0; double[] vals = new double[pp.getDataCount()]; for (double val : pp.getDataList()) vals[count++] = val; return new Parameter(pp.getName(), vals); } private TimeCoord readTimeCoord(GribCollectionProto.Coord pc) throws IOException { if (pc.getBoundCount() > 0) { // its an interval List<TimeCoord.Tinv> coords = new ArrayList<TimeCoord.Tinv>(pc.getValuesCount()); for (int i = 0; i < pc.getValuesCount(); i++) coords.add(new TimeCoord.Tinv((int) pc.getValues(i), (int) pc.getBound(i))); return new TimeCoord(pc.getCode(), pc.getUnit(), coords); } else { List<Integer> coords = new ArrayList<Integer>(pc.getValuesCount()); for (float value : pc.getValuesList()) coords.add((int) value); return new TimeCoord(pc.getCode(), pc.getUnit(), coords); } } private VertCoord readVertCoord(GribCollectionProto.Coord pc) throws IOException { boolean isLayer = (pc.getBoundCount() > 0); List<VertCoord.Level> coords = new ArrayList<VertCoord.Level>(pc.getValuesCount()); for (int i = 0; i < pc.getValuesCount(); i++) coords.add(new VertCoord.Level(pc.getValues(i), isLayer ? 
pc.getBound(i) : 0)); return new VertCoord(pc.getCode(), coords, isLayer); } private EnsCoord readEnsCoord(GribCollectionProto.Coord pc) throws IOException { List<EnsCoord.Coord> coords = new ArrayList<EnsCoord.Coord>(pc.getValuesCount()); for (int i = 0; i < pc.getValuesCount(); i += 2) coords.add(new EnsCoord.Coord((int) pc.getValues(i), (int) pc.getValues(i + 1))); return new EnsCoord(coords); } protected GribCollection.VariableIndex readVariable( GribCollectionProto.Variable pv, GribCollection.GroupHcs group) { int discipline = pv.getDiscipline(); int category = pv.getCategory(); int param = pv.getParameter(); int levelType = pv.getLevelType(); int intvType = pv.getIntervalType(); boolean isLayer = pv.getIsLayer(); int ensDerivedType = pv.getEnsDerivedType(); int probType = pv.getProbabilityType(); String probabilityName = pv.getProbabilityName(); int cdmHash = pv.getCdmHash(); long recordsPos = pv.getRecordsPos(); int recordsLen = pv.getRecordsLen(); int timeIdx = pv.getTimeIdx(); int vertIdx = pv.getVertIdx(); int ensIdx = pv.getEnsIdx(); int tableVersion = pv.getTableVersion(); return gc.makeVariableIndex( group, tableVersion, discipline, category, param, levelType, isLayer, intvType, ensDerivedType, probType, probabilityName, cdmHash, timeIdx, vertIdx, ensIdx, recordsPos, recordsLen); } /////////////////////////////////////////////////////////////////////////////////// // writing private class Group { public Grib2SectionGridDefinition gdss; public int gdsHash; // may have been modified public Grib2Rectilyser rect; public List<Grib2Record> records = new ArrayList<Grib2Record>(); public String name; public Set<Integer> fileSet; // this is so we can show just the component files that are in this group private Group(Grib2SectionGridDefinition gdss, int gdsHash) { this.gdss = gdss; this.gdsHash = gdsHash; Grib2Gds gds = gdss.getGDS(); name = gds.getNameShort() + "-" + gds.ny + "X" + gds.nx; } } /////////////////////////////////////////////////// // create the index private boolean createIndex(File indexFile, CollectionManager.Force ff, Formatter f) throws IOException { long start = System.currentTimeMillis(); ArrayList<String> filenames = new ArrayList<String>(); List<Group> groups = makeAggregatedGroups(filenames, ff, f); createIndex(indexFile, groups, filenames, f); long took = System.currentTimeMillis() - start; f.format("That took %d msecs%n", took); return true; } // read all records in all files, // divide into groups based on GDS hash // each group has an arraylist of all records that belong to it. // for each group, run rectlizer to derive the coordinates and variables public List<Group> makeAggregatedGroups( ArrayList<String> filenames, CollectionManager.Force force, Formatter f) throws IOException { Map<Integer, Group> gdsMap = new HashMap<Integer, Group>(); f.format("GribCollection %s: makeAggregatedGroups%n", gc.getName()); int total = 0; int fileno = 0; for (CollectionManager dcm : collections) { // dcm.scanIfNeeded(); // LOOK ?? 
f.format(" dcm= %s%n", dcm); Map<Integer, Integer> gdsConvert = (Map<Integer, Integer>) dcm.getAuxInfo("gdsHash"); for (MFile mfile : dcm.getFiles()) { // f.format("%3d: %s%n", fileno, mfile.getPath()); filenames.add(mfile.getPath()); Grib2Index index = new Grib2Index(); try { if (!index.readIndex( mfile.getPath(), mfile.getLastModified(), force)) { // heres where the index date is checked against the data file index.makeIndex(mfile.getPath(), f); f.format( " Index written: %s == %d records %n", mfile.getName() + Grib2Index.IDX_EXT, index.getRecords().size()); } else if (debug) { f.format( " Index read: %s == %d records %n", mfile.getName() + Grib2Index.IDX_EXT, index.getRecords().size()); } } catch (IOException ioe) { f.format( "GribCollectionBuilder: reading/Creating gbx9 index failed err=%s%n skipping %s%n", ioe.getMessage(), mfile.getPath() + Grib2Index.IDX_EXT); continue; } for (Grib2Record gr : index.getRecords()) { gr.setFile(fileno); // each record tracks which file it belongs to int gdsHash = gr.getGDSsection().getGDS().hashCode(); // use GDS hash code to group records if (gdsConvert != null && gdsConvert.get(gdsHash) != null) { // allow external config to muck with gdsHash. Why? because of error in // encoding gdsHash = (Integer) gdsConvert.get(gdsHash); // and we need exact hash matching } Group g = gdsMap.get(gdsHash); if (g == null) { g = new Group(gr.getGDSsection(), gdsHash); gdsMap.put(gdsHash, g); } g.records.add(gr); total++; } fileno++; } } f.format(" total grib records= %d%n", total); Grib2Rectilyser.Counter c = new Grib2Rectilyser.Counter(); List<Group> result = new ArrayList<Group>(gdsMap.values()); for (Group g : result) { g.rect = new Grib2Rectilyser(g.records, g.gdsHash); f.format(" GDS hash %d == ", g.gdsHash); g.rect.make(f, c); } f.format( " Rectilyser: nvars=%d records unique=%d total=%d dups=%d (%f) %n", c.vars, c.recordsUnique, c.records, c.dups, ((float) c.dups) / c.records); return result; } /* MAGIC_START version sizeRecords VariableRecords (sizeRecords bytes) sizeIndex GribCollectionIndex (sizeIndex bytes) */ private void createIndex( File indexFile, List<Group> groups, ArrayList<String> filenames, Formatter f) throws IOException { Grib2Record first = null; // take global metadata from here if (indexFile.exists()) indexFile.delete(); // replace it f.format(" createIndex for %s%n", indexFile.getPath()); RandomAccessFile raf = new RandomAccessFile(indexFile.getPath(), "rw"); raf.order(RandomAccessFile.BIG_ENDIAN); try { //// header message raf.write(MAGIC_START.getBytes("UTF-8")); raf.writeInt(version); long lenPos = raf.getFilePointer(); raf.writeLong(0); // save space to write the length of the record section long countBytes = 0; int countRecords = 0; for (Group g : groups) { g.fileSet = new HashSet<Integer>(); for (Grib2Rectilyser.VariableBag vb : g.rect.getGribvars()) { if (first == null) first = vb.first; GribCollectionProto.VariableRecords vr = writeRecordsProto(vb, g.fileSet); byte[] b = vr.toByteArray(); vb.pos = raf.getFilePointer(); vb.length = b.length; raf.write(b); countBytes += b.length; countRecords += vb.recordMap.length; } } long bytesPerRecord = countBytes / ((countRecords == 0) ? 
1 : countRecords); f.format( " write RecordMaps: bytes = %d record = %d bytesPerRecord=%d%n", countBytes, countRecords, bytesPerRecord); if (first == null) { logger.error("GribCollection {}: has no files\n{}", gc.getName(), f.toString()); throw new IllegalArgumentException("GribCollection " + gc.getName() + " has no files"); } long pos = raf.getFilePointer(); raf.seek(lenPos); raf.writeLong(countBytes); raf.seek(pos); // back to the output. GribCollectionProto.GribCollectionIndex.Builder indexBuilder = GribCollectionProto.GribCollectionIndex.newBuilder(); indexBuilder.setName(gc.getName()); for (String fn : filenames) indexBuilder.addFiles(fn); for (Group g : groups) indexBuilder.addGroups(writeGroupProto(g)); /* int count = 0; for (DatasetCollectionManager dcm : collections) { indexBuilder.addParams(makeParamProto(new Parameter("spec" + count, dcm.()))); count++; } */ // what about just storing first ?? Grib2SectionIdentification ids = first.getId(); indexBuilder.setCenter(ids.getCenter_id()); indexBuilder.setSubcenter(ids.getSubcenter_id()); indexBuilder.setMaster(ids.getMaster_table_version()); indexBuilder.setLocal(ids.getLocal_table_version()); Grib2Pds pds = first.getPDS(); indexBuilder.setGenProcessType(pds.getGenProcessType()); indexBuilder.setGenProcessId(pds.getGenProcessId()); indexBuilder.setBackProcessId(pds.getBackProcessId()); GribCollectionProto.GribCollectionIndex index = indexBuilder.build(); byte[] b = index.toByteArray(); NcStream.writeVInt(raf, b.length); // message size raf.write(b); // message - all in one gulp f.format(" write GribCollectionIndex= %d bytes%n", b.length); } finally { f.format(" file size = %d bytes%n", raf.length()); raf.close(); } } /* private void createIndexForGroup(Group group, ArrayList<String> filenames) throws IOException { Grib2Record first = null; // take global metadata from here File file = new File(gc.getDirectory(), group.name + GribCollection.IDX_EXT); if (file.exists()) file.delete(); // replace it RandomAccessFile raf = new RandomAccessFile(file.getPath(), "rw"); raf.order(RandomAccessFile.BIG_ENDIAN); try { //// header message String magic = gc.getMagicBytes(); raf.write(magic.getBytes("UTF-8")); raf.writeInt(version); long lenPos = raf.getFilePointer(); raf.writeLong(0); // save space to write the length of the record section long countBytes = 0; int countRecords = 0; group.fileSet = new HashSet<Integer>(); for (Rectilyser.VariableBag vb : group.rect.getGribvars()) { if (first == null) first = vb.first; GribCollectionProto.VariableRecords vr = makeRecordsProto(vb, group.fileSet); byte[] b = vr.toByteArray(); vb.pos = raf.getFilePointer(); vb.length = b.length; raf.write(b); countBytes += b.length; } countRecords += group.records.size(); if (countRecords == 0) countRecords = 1; long bytesPerRecord = countBytes / countRecords; logger.debug("VariableRecords: bytes = {} record = {} bytesPerRecord={}", new Object[] {countBytes, countRecords, bytesPerRecord}); long pos = raf.getFilePointer(); raf.seek(lenPos); raf.writeLong(countBytes); raf.seek(pos); // back to the output.
GribCollectionProto.GribCollectionIndex.Builder indexBuilder = GribCollectionProto.GribCollectionIndex.newBuilder(); indexBuilder.setName(group.name); for (String fn : filenames) indexBuilder.addFiles(fn); indexBuilder.addGroups(makeGroupProto(group)); int count = 0; for (CollectionManager dcm : collections) { indexBuilder.addParams(makeParamProto(new Parameter("spec" + count, dcm.toString()))); count++; } Grib2SectionIdentification ids = first.getId(); indexBuilder.setCenter(ids.getCenter_id()); indexBuilder.setSubcenter(ids.getSubcenter_id()); indexBuilder.setMaster(ids.getMaster_table_version()); indexBuilder.setLocal(ids.getLocal_table_version()); GribCollectionProto.GribCollectionIndex index = indexBuilder.build(); byte[] b = index.toByteArray(); NcStream.writeVInt(raf, b.length); // message size raf.write(b); // message - all in one gulp logger.debug("GribCollectionIndex= {} bytes%n", b.length); } finally { logger.debug("file size = {} bytes%n", raf.length()); raf.close(); if (raf != null) raf.close(); } } */ private GribCollectionProto.VariableRecords writeRecordsProto( Grib2Rectilyser.VariableBag vb, Set<Integer> fileSet) throws IOException { GribCollectionProto.VariableRecords.Builder b = GribCollectionProto.VariableRecords.newBuilder(); b.setCdmHash(vb.first.cdmVariableHash(0)); for (Grib2Rectilyser.Record ar : vb.recordMap) { GribCollectionProto.Record.Builder br = GribCollectionProto.Record.newBuilder(); if (ar == null || ar.gr == null) { br.setFileno(0); br.setPos(0); // missing : ok to use 0 since drsPos > 0 } else { br.setFileno(ar.gr.getFile()); fileSet.add(ar.gr.getFile()); Grib2SectionDataRepresentation drs = ar.gr.getDataRepresentationSection(); br.setPos(drs.getStartingPosition()); } b.addRecords(br); } return b.build(); } private GribCollectionProto.Group writeGroupProto(Group g) throws IOException { GribCollectionProto.Group.Builder b = GribCollectionProto.Group.newBuilder(); b.setGds(ByteString.copyFrom(g.gdss.getRawBytes())); for (Grib2Rectilyser.VariableBag vb : g.rect.getGribvars()) b.addVariables(writeVariableProto(vb)); List<TimeCoord> timeCoords = g.rect.getTimeCoords(); for (int i = 0; i < timeCoords.size(); i++) b.addTimeCoords(writeCoordProto(timeCoords.get(i), i)); List<VertCoord> vertCoords = g.rect.getVertCoords(); for (int i = 0; i < vertCoords.size(); i++) b.addVertCoords(writeCoordProto(vertCoords.get(i), i)); List<EnsCoord> ensCoords = g.rect.getEnsCoords(); for (int i = 0; i < ensCoords.size(); i++) b.addEnsCoords(writeCoordProto(ensCoords.get(i), i)); for (Integer aFileSet : g.fileSet) b.addFileno(aFileSet); return b.build(); } private GribCollectionProto.Variable writeVariableProto(Grib2Rectilyser.VariableBag vb) throws IOException { GribCollectionProto.Variable.Builder b = GribCollectionProto.Variable.newBuilder(); b.setDiscipline(vb.first.getDiscipline()); Grib2Pds pds = vb.first.getPDS(); b.setCategory(pds.getParameterCategory()); b.setParameter(pds.getParameterNumber()); b.setLevelType(pds.getLevelType1()); b.setIsLayer(Grib2Utils.isLayer(vb.first)); b.setIntervalType(pds.getStatisticalProcessType()); b.setCdmHash(vb.first.cdmVariableHash(0)); b.setRecordsPos(vb.pos); b.setRecordsLen(vb.length); b.setTimeIdx(vb.timeCoordIndex); if (vb.vertCoordIndex >= 0) b.setVertIdx(vb.vertCoordIndex); if (vb.ensCoordIndex >= 0) b.setEnsIdx(vb.ensCoordIndex); if (pds.isEnsembleDerived()) { Grib2Pds.PdsEnsembleDerived pdsDerived = (Grib2Pds.PdsEnsembleDerived) pds; b.setEnsDerivedType(pdsDerived.getDerivedForecastType()); // derived type (table 4.7) } if 
(pds.isProbability()) { Grib2Pds.PdsProbability pdsProb = (Grib2Pds.PdsProbability) pds; b.setProbabilityName(pdsProb.getProbabilityName()); b.setProbabilityType(pdsProb.getProbabilityType()); } return b.build(); } protected GribCollectionProto.Parameter writeParamProto(Parameter param) throws IOException { GribCollectionProto.Parameter.Builder b = GribCollectionProto.Parameter.newBuilder(); b.setName(param.getName()); if (param.isString()) b.setSdata(param.getStringValue()); else { for (int i = 0; i < param.getLength(); i++) b.addData(param.getNumericValue(i)); } return b.build(); } protected GribCollectionProto.Coord writeCoordProto(TimeCoord tc, int index) throws IOException { GribCollectionProto.Coord.Builder b = GribCollectionProto.Coord.newBuilder(); b.setCode(index); b.setUnit(tc.getUnits()); float scale = (float) tc.getTimeUnitScale(); // deal with, eg, "6 hours" by multiplying values by 6 if (tc.isInterval()) { for (TimeCoord.Tinv tinv : tc.getIntervals()) { b.addValues(tinv.getBounds1() * scale); b.addBound(tinv.getBounds2() * scale); } } else { for (int value : tc.getCoords()) b.addValues(value * scale); } return b.build(); } protected GribCollectionProto.Coord writeCoordProto(VertCoord vc, int index) throws IOException { GribCollectionProto.Coord.Builder b = GribCollectionProto.Coord.newBuilder(); b.setCode(vc.getCode()); b.setUnit(vc.getUnits()); for (VertCoord.Level coord : vc.getCoords()) { if (vc.isLayer()) { b.addValues((float) coord.getValue1()); b.addBound((float) coord.getValue2()); } else { b.addValues((float) coord.getValue1()); } } return b.build(); } protected GribCollectionProto.Coord writeCoordProto(EnsCoord ec, int index) throws IOException { GribCollectionProto.Coord.Builder b = GribCollectionProto.Coord.newBuilder(); b.setCode(0); b.setUnit(""); for (EnsCoord.Coord coord : ec.getCoords()) { b.addValues((float) coord.getCode()); b.addValues((float) coord.getEnsMember()); } return b.build(); } }
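// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): the public entry points of
// Grib2CollectionBuilder defined above. The data-file path is a hypothetical placeholder, and the
// surrounding imports (java.util.Formatter, java.io.File, CollectionManager, GribCollection) are
// assumed to be available as in the builder itself.
class Grib2CollectionBuilderExample {

  static GribCollection openSingleFile() throws java.io.IOException {
    java.util.Formatter f = new java.util.Formatter(System.out);

    // Read the collection index for one GRIB-2 file, creating or refreshing it if the
    // CollectionManager.Force mode and the index age require it (see readOrCreateIndex()).
    GribCollection gc = Grib2CollectionBuilder.createFromSingleFile(
        new java.io.File("/data/example.grib2"), CollectionManager.Force.test, f);

    // For a whole collection, factory(dcm, force, f) plays the same role, and
    // writeIndexFile(indexFile, dcm, f) always rewrites the index on disk.
    f.flush();
    return gc;
  }
}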
public class RC { static boolean showlog = false; /* do not do any logging */ public static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(RC.class); ////////////////////////////////////////////////// // Predefined flags // To add a new flag: // 1. choose a name for the flag // 2. Define the protected static field with default value // 3. Define a get function // 4. Add an arm to the set function // 5. Add any usefull utilities like booleanize() public static final String USEGROUPSKEY = "ucar.nc2.cdm.usegroups"; public static final String VERIFYSERVERKEY = "ucar.nc2.net.verifyserver"; public static final String ALLOWSELFSIGNEDKEY = "ucar.nc2.net.allowselfsigned"; protected static boolean useGroups = true; protected static boolean verifyServer = false; protected static boolean allowSelfSigned = true; public static boolean getUseGroups() { if (!initialized) RC.initialize(); return useGroups; } public static boolean getVerifyServer() { if (!initialized) RC.initialize(); return verifyServer; } public static boolean getAllowSelfSigned() { if (!initialized) RC.initialize(); return allowSelfSigned; } public static void set(String key, String value) { // TODO: think about the rc properties naming hierarchy assert (key != null); if (USEGROUPSKEY.equals(key)) { useGroups = booleanize(value); } else if (VERIFYSERVERKEY.equals(key)) { verifyServer = booleanize(value); } else if (ALLOWSELFSIGNEDKEY.equals(key)) { allowSelfSigned = booleanize(value); } } static boolean booleanize(String value) { // canonical boolean values if (value == null || "0".equals(value) || "false".equalsIgnoreCase(value)) return false; if (value.length() == 0 || "1".equals(value) || "true".equalsIgnoreCase(value)) return true; return value != null; // any non-null value? } ////////////////////////////////////////////////// static final String DFALTRCFILE = ".threddsrc"; static final char LTAG = '['; static final char RTAG = ']'; static final String[] rcfilelist = new String[] {".dodsrc", ".tdsrc"}; static int urlCompare(URL u1, URL u2) { int relation; if (u1 == null && u2 == null) return 0; if (u1 == null) return -1; if (u2 == null) return +1; // 1. host test String host1 = (new StringBuilder(u1.getHost())).reverse().toString(); String host2 = (new StringBuilder(u2.getHost())).reverse().toString(); // Use lexical order on the reversed host names relation = host1.compareTo(host2); if (relation != 0) return relation; // 2. path test relation = (u1.getPath().compareTo(u2.getPath())); if (relation != 0) return relation; // 3. port number relation = (u1.getPort() - u2.getPort()); if (relation != 0) return relation; // 4. note: all other fields are ignored return 0; } // Match has different semantics than urlCompare static boolean urlMatch(URL pattern, URL url) { int relation; if (pattern == null) return (url == null); if (!(url.getHost().endsWith(pattern.getHost()))) return false; // e.g. pattern=x.y.org url=y.org if (!(url.getPath().startsWith(pattern.getPath()))) return false; // e.g. 
pattern=y.org/a/b url=y.org/a if (pattern.getPort() > 0 && pattern.getPort() != url.getPort()) return false; // note: all other fields are ignored return true; } public static class Triple implements Comparable { public String key; // also sort key public String value; public URL url; public Triple(String key, String value, String url) { URL u = null; if (url != null && url.length() > 0) try { u = new URL(url); } catch (MalformedURLException e) { u = null; } set(key, value, u); } public Triple(String key, String value, URL url) { set(key, value, url); } void set(String key, String value, URL url) { this.key = key.trim().toLowerCase(); this.url = url; this.value = value; if (this.value == null) this.value = ""; } public boolean equals(Object o) { if (o == null || !(o instanceof Triple)) return false; return (compareTo((Triple) o) == 0); } public int compareTo(Object o) { if (o == null) throw new NullPointerException(); Triple t = (Triple) o; int relation = key.compareTo(t.key); if (relation != 0) return relation; relation = urlCompare(this.url, t.url); return relation; } // toString produces an rc line public String toString() { StringBuilder line = new StringBuilder(); if (url != null) { line.append("["); line.append(url.toString()); line.append("]"); } line.append(key); line.append("="); line.append(value); return line.toString(); } } // Define a singlton RC instance for general global use static RC dfaltRC = null; private static boolean initialized = false; static { RC.initialize(); } public static synchronized void initialize() { if (!initialized) { initialized = true; RC.loadDefaults(); RC.setWellKnown(); RC.loadFromJava(); } } /** * Allow users to add to the default rc * * @param key * @param value * @param url null => not url specific */ public static synchronized void add(String key, String value, String url) { if (key == null) return; if (!initialized) RC.initialize(); Triple t = new Triple(key, value, url); dfaltRC.insert(t); // recompute well-knowns setWellKnown(); } /** * Allow users to search the default rc * * @param key * @param url null => not url specific * @return value corresponding to key+url, or null if does not exist */ public static synchronized String find(String key, String url) { if (key == null) return null; if (!initialized) RC.initialize(); Triple t = dfaltRC.lookup(key, url); return (t == null ? 
null : t.value); } /** Record some well known parameters */ static void setWellKnown() { if (dfaltRC.triplestore.size() == 0) return; // Walk the set of triples looking for those that have no url for (String key : dfaltRC.keySet()) { Triple triple = dfaltRC.lookup(key); if (triple.url == null) { RC.set(key, triple.value); // let set sort it out } } } static void loadDefaults() { RC rc0 = new RC(); String[] locations = new String[] { System.getProperty("user.home"), System.getProperty("user.dir"), }; boolean found1 = false; for (String loc : locations) { if (loc == null) continue; String dir = loc.replace('\\', '/'); if (dir.endsWith("/")) dir = dir.substring(0, dir.length() - 1); for (String rcpath : rcfilelist) { String filepath = loc + "/" + rcpath; if (rc0.load(filepath)) found1 = true; } } if (!found1) if (showlog) log.debug("No .rc file found"); dfaltRC = rc0; } static void loadFromJava() { String[] flags = new String[] {USEGROUPSKEY, VERIFYSERVERKEY, ALLOWSELFSIGNEDKEY}; for (String flag : flags) { String value = System.getProperty(flag); if (value != null) { set(flag, value); } } } static RC getDefault() { return dfaltRC; } ////////////////////////////////////////////////// // Instance Data Map<String, List<Triple>> triplestore; ////////////////////////////////////////////////// // constructors public RC() { triplestore = new HashMap<String, List<Triple>>(); } ////////////////////////////////////////////////// // Loaders // Load this triple store from an rc file // overwrite existing entries public boolean load(String abspath) { abspath = abspath.replace('\\', '/'); File rcFile = new File(abspath); if (!rcFile.exists() || !rcFile.canRead()) { return false; } if (showlog) log.debug("Loading rc file: " + abspath); try (BufferedReader rdr = new BufferedReader(new InputStreamReader(new FileInputStream(rcFile), CDM.UTF8))) { for (int lineno = 1; ; lineno++) { URL url = null; String line = rdr.readLine(); if (line == null) break; // trim leading blanks line = line.trim(); if (line.length() == 0) continue; // empty line if (line.charAt(0) == '#') continue; // check for comment // parse the line if (line.charAt(0) == LTAG) { int rindex = line.indexOf(RTAG); if (rindex < 0) return false; if (showlog) log.error("Malformed [url] at " + abspath + "." + lineno); String surl = line.substring(1, rindex); try { url = new URL(surl); } catch (MalformedURLException mue) { if (showlog) log.error("Malformed [url] at " + abspath + "." 
+ lineno); } line = line.substring(rindex + 1); // trim again line = line.trim(); } // Get the key,value part String[] pieces = line.split("\\s*=\\s*"); assert (pieces.length == 1 || pieces.length == 2); // Create the triple String value = "1"; if (pieces.length == 2) value = pieces[1].trim(); Triple triple = new Triple(pieces[0].trim(), value, url); List<Triple> list = triplestore.get(triple.key); if (list == null) list = new ArrayList<Triple>(); Triple prev = addtriple(list, triple); triplestore.put(triple.key, list); } } catch (FileNotFoundException fe) { if (showlog) log.debug("Loading rc file: " + abspath); return false; } catch (IOException ioe) { if (showlog) log.error("File " + abspath + ": IO exception: " + ioe.getMessage()); return false; } return true; } public Set<String> keySet() { return triplestore.keySet(); } public List<Triple> getTriples(String key) { List<Triple> list = triplestore.get(key); if (list == null) list = new ArrayList<Triple>(); return list; } public Triple lookup(String key) { return lookup(key, (URL) null); } public Triple lookup(String key, String url) { if (url == null || url.length() == 0) return lookup(key); try { URL u = new URL(url); return lookup(key, u); } catch (MalformedURLException m) { } return null; } public Triple lookup(String key, URL url) { List<Triple> list = triplestore.get(key); if (list == null) return null; if (url == null) { if (list.size() == 0) return null; return list.get(0); } else for (Triple t : list) { if (urlMatch(t.url, url)) return t; } return null; } Triple addtriple(List<Triple> list, Triple triple) { Triple prev = null; assert (list != null); // Look for duplicates int i = list.indexOf(triple); if (i >= 0) { prev = list.remove(i); } list.add(triple); Collections.sort(list); return prev; } // Allow for external loading public Triple insert(Triple t) { if (t.key == null) return null; List<Triple> list = triplestore.get(t.key); if (list == null) list = new ArrayList<Triple>(); Triple prev = addtriple(list, t); triplestore.put(t.key, list); return prev; } // Output in .rc form public String toString() { StringBuilder rc = new StringBuilder(); for (String key : keySet()) { List<Triple> list = getTriples(key); for (Triple triple : list) { String line = triple.toString(); rc.append(line); rc.append("\n"); } } return rc.toString(); } } // class RC
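RC is driven entirely through its static add/find entry points over a triple store keyed by (key, optional URL). A short usage sketch follows; the key name and URLs are invented for illustration, it assumes the sketch class sits in the same package as RC, and it relies only on the RC.add/RC.find signatures and the urlMatch/ordering behavior shown above.

// Usage sketch for the RC triple store; key and URLs are hypothetical.
public class RCUsageSketch {
  public static void main(String[] args) {
    // Equivalent to an rc-file line:  my.example.flag=true
    RC.add("my.example.flag", "true", null);
    // Equivalent to an rc-file line:  [http://thredds.example.org/data]my.example.flag=false
    RC.add("my.example.flag", "false", "http://thredds.example.org/data");

    // A null query URL returns the first triple for the key; triples with no URL sort first.
    System.out.println(RC.find("my.example.flag", null)); // prints: true
    // A non-null query URL is matched only against URL-scoped triples (see urlMatch):
    // host is a suffix match, path is a prefix match, and the port must agree if the pattern has one.
    System.out.println(RC.find("my.example.flag", "http://thredds.example.org/data/file.nc")); // prints: false
  }
}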
/** * A session is encapsulated in an instance of the class HTTPSession. The encapsulation is with * respect to a specific HttpHost "realm", where the important part is host+port. This means that * once a session is specified, it is tied permanently to that realm. * * <p>A Session encapsulates a number of other objects: * * <ul> * <li>An instance of an Apache HttpClient. * <li>An HTTP session id * <li>A RequestContext object; this also includes authentication: specifically a credential and a * credentials provider. * </ul> * * <p>Currently, it is assumed that only one set of credentials is needed, whether directly for * server X or for server Y. This may change in the future. * * <p>As a rule, if the client gives an HTTPSession object to the "create method" procedures of * HTTPFactory (e.g. HTTPFactory.Get or HTTPFactory.Post) then that creation call must specify a url * that is "compatible" with the scope of the session. The method url is <i>compatible</i> if its * host+port is the same as the session's host+port (=scope) and its scheme is compatible, where * e.g. http is compatible with https (see HTTPAuthUtil.httphostCompatible). * * <p>If the HTTPFactory method creation call does not specify a session object, then one is created * (and destroyed) behind the scenes along with the method. * * <p>Note that the term legalurl in the following code means that the url has reserved characters * within identifiers in escaped form. This is particularly an issue for queries. Especially: * ?x[0:5] is legal and the square brackets need not be encoded. * * <p>As of the move to Apache HttpClient 4.4 and later, the underlying HttpClient objects are * generally immutable. This means that at least this class (HTTPSession) and the HTTPMethod class * must store the relevant info and create the HttpClient and HttpMethod objects dynamically. This * also means that when a parameter is changed (Agent, for example), any existing cached HttpClient * must be thrown away and reconstructed using the change. As a rule, the HttpClient object will be * created at the last minute so that multiple parameter changes can be effected without having to * re-create the HttpClient for each parameter change. Also note that the immutable objects will be * cached and reused if no parameters are changed. * * <p><em>Authorization</em> We assume that the session supports two CredentialsProvider instances: * one global to all HTTPSession objects and one specific to each HTTPSession object. * * <p>As an aside, authentication is a bit tricky because some authorization schemes use * redirection. That is, the initial request is made to server X, but X says: "go to server Y" to * get, say, an authorization token. Then Y says: return to X with this token and proceed. * * <p><em>SSL</em> TBD. 
*/ public class HTTPSession implements Closeable { ////////////////////////////////////////////////// // Constants // Define all the legal properties // Previously taken from class AllClientPNames, but that is now // deprecated, so just use an enum static /*package*/ enum Prop { ALLOW_CIRCULAR_REDIRECTS, HANDLE_REDIRECTS, HANDLE_AUTHENTICATION, MAX_REDIRECTS, MAX_THREADS, SO_TIMEOUT, CONN_TIMEOUT, CONN_REQ_TIMEOUT, USER_AGENT, COOKIE_STORE, RETRIES, UNAVAILRETRIES, COMPRESSION, CREDENTIALS, USESESSIONS, } // Header names // from: http://en.wikipedia.org/wiki/List_of_HTTP_header_fields public static final String HEADER_USERAGENT = "User-Agent"; public static final String ACCEPT_ENCODING = "Accept-Encoding"; static final int DFALTTHREADCOUNT = 50; static final int DFALTREDIRECTS = 25; static final int DFALTCONNTIMEOUT = 1 * 60 * 1000; // 1 minutes (60000 milliseconds) static final int DFALTCONNREQTIMEOUT = DFALTCONNTIMEOUT; static final int DFALTSOTIMEOUT = 5 * 60 * 1000; // 5 minutes (300000 milliseconds) static final int DFALTRETRIES = 3; static final int DFALTUNAVAILRETRIES = 3; static final int DFALTUNAVAILINTERVAL = 3000; // 3 seconds static final String DFALTUSERAGENT = "/NetcdfJava/HttpClient4.4"; static final String[] KNOWNCOMPRESSORS = {"gzip", "deflate"}; ////////////////////////////////////////////////////////////////////////// static final boolean IGNORECERTS = false; ////////////////////////////////////////////////////////////////////////// // Type Declaration(s) // Support loose certificate acceptance static class LooseTrustStrategy extends TrustSelfSignedStrategy { @Override public boolean isTrusted(final X509Certificate[] chain, String authType) throws CertificateException { try { if (super.isTrusted(chain, authType)) return true; // check expiration dates for (X509Certificate x5 : chain) { try { x5.checkValidity(); } catch (CertificateExpiredException | CertificateNotYetValidException ce) { return true; } } } catch (CertificateException e) { return true; // temporary } return false; } } /** Sub-class HashTable<String,Object> for mnemonic convenience and for synchronized access. */ static class Settings extends Hashtable<Prop, Object> { public Settings() {} public Set<Prop> getKeys() { return keySet(); } public Object getParameter(Prop param) { return super.get(param); } public long getIntParameter(Prop param) { return (Long) super.get(param); } public Settings setParameter(Prop param, Object value) { super.put(param, value); return this; } public Object removeParameter(Prop param) { return super.remove(param); } } // For communication between HTTPSession.execute and HTTPMethod.execute. 
static /*package*/ class ExecState { public HttpRequestBase request = null; public CloseableHttpResponse response = null; } static /*package*/ enum Methods { Get("get"), Head("head"), Put("put"), Post("post"), Options("options"); private final String name; Methods(String name) { this.name = name; } public String getName() { return name; } } static class GZIPResponseInterceptor implements HttpResponseInterceptor { public void process(final HttpResponse response, final HttpContext context) throws HttpException, IOException { HttpEntity entity = response.getEntity(); if (entity != null) { Header ceheader = entity.getContentEncoding(); if (ceheader != null) { HeaderElement[] codecs = ceheader.getElements(); for (HeaderElement h : codecs) { if (h.getName().equalsIgnoreCase("gzip")) { response.setEntity(new GzipDecompressingEntity(response.getEntity())); return; } } } } } } static class DeflateResponseInterceptor implements HttpResponseInterceptor { public void process(final HttpResponse response, final HttpContext context) throws HttpException, IOException { HttpEntity entity = response.getEntity(); if (entity != null) { Header ceheader = entity.getContentEncoding(); if (ceheader != null) { HeaderElement[] codecs = ceheader.getElements(); for (HeaderElement h : codecs) { if (h.getName().equalsIgnoreCase("deflate")) { response.setEntity(new DeflateDecompressingEntity(response.getEntity())); return; } } } } } } static class ZipStreamFactory implements InputStreamFactory { // InputStreamFactory methods @Override public InputStream create(InputStream instream) throws IOException { return new ZipInputStream(instream, HTTPUtil.UTF8); } } static class GZIPStreamFactory implements InputStreamFactory { // InputStreamFactory methods @Override public InputStream create(InputStream instream) throws IOException { return new GZIPInputStream(instream); } } //////////////////////////////////////////////////////////////////////// // Static variables public static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(HTTPSession.class); protected static PoolingHttpClientConnectionManager connmgr; // Define a settings object to hold all the // settable values; there will be one // instance for global and one for local. 
static Settings globalsettings; // Define interceptor instances static List<HttpRequestInterceptor> reqintercepts = new ArrayList<HttpRequestInterceptor>(); static List<HttpResponseInterceptor> rspintercepts = new ArrayList<HttpResponseInterceptor>(); // This is a hack to suppress content-encoding headers from request protected static HttpResponseInterceptor CEKILL; // Debug Header interceptors protected static List<HttpRequestInterceptor> dbgreq = new ArrayList<>(); protected static List<HttpResponseInterceptor> dbgrsp = new ArrayList<>(); protected static Map<String, InputStreamFactory> contentDecoderMap; // public final HttpClientBuilder setContentDecoderRegistry(Map<String,InputStreamFactory> // contentDecoderMap) // Since can't access CredentialsProvider map, mimic protected static Map<AuthScope, CredentialsProvider> globalcreds = new HashMap<>(); // As taken from the command line, usually protected static KeyStore keystore = null; protected static KeyStore truststore = null; protected static String keypath = null; protected static String keypassword = null; protected static String trustpath = null; protected static String trustpassword = null; protected static SSLConnectionSocketFactory globalsslfactory = null; protected static Registry<ConnectionSocketFactory> sslregistry = null; protected static HttpHost httpproxy = null; protected static HttpHost httpsproxy = null; protected static String proxyuser = null; protected static String proxypwd = null; // For debugging protected static Boolean globaldebugheaders = null; static { // watch out: order is important for these initializers CEKILL = new HTTPUtil.ContentEncodingInterceptor(); contentDecoderMap = new HashMap<String, InputStreamFactory>(); contentDecoderMap.put("zip", new ZipStreamFactory()); contentDecoderMap.put("gzip", new GZIPStreamFactory()); globalsettings = new Settings(); setDefaults(globalsettings); processDFlags(); // Process all -D flags connmgr = new PoolingHttpClientConnectionManager(sslregistry); setGlobalUserAgent(DFALTUSERAGENT); // does not work setGlobalThreadCount(DFALTTHREADCOUNT); setGlobalConnectionTimeout(DFALTCONNTIMEOUT); setGlobalSoTimeout(DFALTSOTIMEOUT); } ////////////////////////////////////////////////////////////////////////// // Static Initialization // Provide defaults for a settings map protected static void setDefaults(Settings props) { if (false) { // turn off for now props.setParameter(Prop.HANDLE_AUTHENTICATION, Boolean.TRUE); } props.setParameter(Prop.HANDLE_REDIRECTS, Boolean.TRUE); props.setParameter(Prop.ALLOW_CIRCULAR_REDIRECTS, Boolean.TRUE); props.setParameter(Prop.MAX_REDIRECTS, (Integer) DFALTREDIRECTS); props.setParameter(Prop.SO_TIMEOUT, (Integer) DFALTSOTIMEOUT); props.setParameter(Prop.CONN_TIMEOUT, (Integer) DFALTCONNTIMEOUT); props.setParameter(Prop.CONN_REQ_TIMEOUT, (Integer) DFALTCONNREQTIMEOUT); props.setParameter(Prop.USER_AGENT, DFALTUSERAGENT); } static synchronized void processDFlags() { // SSL flags keypath = cleanproperty("keystore"); keypassword = cleanproperty("keystorepassword"); trustpath = cleanproperty("truststore"); trustpassword = cleanproperty("truststorepassword"); setGlobalSSLAuth(keypath, keypassword, trustpath, trustpassword); // Proxy flags String proxyurl = cleanproperty("proxyurl"); if (proxyurl != null) setGlobalProxy(proxyurl); else { // Check the java.net flags String proxyhost = cleanproperty("https.proxyHost"); if (proxyhost != null) { StringBuilder buf = new StringBuilder(); buf.append("https://"); buf.append(proxyhost); String proxyport = 
cleanproperty("https.proxyPort"); if (proxyport != null) { buf.append(":"); buf.append(proxyport); } setGlobalProxy(buf.toString()); } } } ////////////////////////////////////////////////////////////////////////// // Static Methods (Mostly global accessors) public static synchronized void setGlobalUserAgent(String userAgent) { globalsettings.setParameter(Prop.USER_AGENT, userAgent); } public static synchronized String getGlobalUserAgent() { return (String) globalsettings.getParameter(Prop.USER_AGENT); } public static synchronized void setGlobalThreadCount(int nthreads) { // globalsettings.setParameter(Prop.MAX_THREADS,nthreads); throw new UnsupportedOperationException( "HTTPSession.setGlobalThreadCount is currently not working"); } // Alias public static void setGlobalMaxConnections(int nthreads) { setGlobalThreadCount(nthreads); } public static synchronized int getGlobalThreadCount() { return connmgr.getMaxTotal(); } // Timeouts public static synchronized void setGlobalConnectionTimeout(int timeout) { if (timeout >= 0) { globalsettings.setParameter(Prop.CONN_TIMEOUT, (Integer) timeout); globalsettings.setParameter(Prop.CONN_REQ_TIMEOUT, (Integer) timeout); } } public static synchronized void setGlobalSoTimeout(int timeout) { if (timeout >= 0) globalsettings.setParameter(Prop.SO_TIMEOUT, (Integer) timeout); } /** Enable/disable redirection following Default is yes. */ public static synchronized void setGlobalFollowRedirects(boolean tf) { globalsettings.setParameter(Prop.HANDLE_REDIRECTS, (Boolean) tf); } /** * Set the max number of redirects to follow * * @param n */ public static synchronized void setGlobalMaxRedirects(int n) { if (n < 0) // validate throw new IllegalArgumentException("setMaxRedirects"); globalsettings.setParameter(Prop.MAX_REDIRECTS, n); } public static synchronized Object getGlobalSetting(String key) { return globalsettings.get(key); } ////////////////////////////////////////////////// // Compression public static synchronized void setGlobalCompression(String compressors) { if (globalsettings.getParameter(Prop.COMPRESSION) != null) removeGlobalCompression(); String compresslist = checkCompressors(compressors); if (HTTPUtil.nullify(compresslist) == null) throw new IllegalArgumentException("Bad compressors: " + compressors); globalsettings.setParameter(Prop.COMPRESSION, compresslist); HttpResponseInterceptor hrsi; if (compresslist.contains("gzip")) { hrsi = new GZIPResponseInterceptor(); rspintercepts.add(hrsi); } if (compresslist.contains("deflate")) { hrsi = new DeflateResponseInterceptor(); rspintercepts.add(hrsi); } } public static void removeGlobalCompression() { if (globalsettings.removeParameter(Prop.COMPRESSION) != null) { for (int i = rspintercepts.size() - 1; i >= 0; i--) { // walk backwards HttpResponseInterceptor hrsi = rspintercepts.get(i); if (hrsi instanceof GZIPResponseInterceptor || hrsi instanceof DeflateResponseInterceptor) rspintercepts.remove(i); } } } protected static synchronized String checkCompressors(String compressors) { // Syntactic check of compressors Set<String> cset = new HashSet<>(); compressors = compressors.replace(',', ' '); compressors = compressors.replace('\t', ' '); String[] pieces = compressors.split("[ ]+"); for (String p : pieces) { for (String c : KNOWNCOMPRESSORS) { if (p.equalsIgnoreCase(c)) { cset.add(c); break; } } } StringBuilder buf = new StringBuilder(); for (String s : cset) { if (buf.length() > 0) buf.append(","); buf.append(s); } return buf.toString(); } ////////////////////////////////////////////////// // 
Authorization /** * @param provider * @throws HTTPException */ public static void setGlobalCredentialsProvider(CredentialsProvider provider) throws HTTPException { setGlobalCredentialsProvider(provider, (AuthScope) null); } /** * This is the most general case * * @param provider the credentials provider * @param scope where to use it (i.e. on what host) * @throws HTTPException */ public static void setGlobalCredentialsProvider(CredentialsProvider provider, AuthScope scope) throws HTTPException { mapcreds(provider, scope, globalcreds); } /** * It is convenient to be able to directly set the Credentials (not the provider) when those * credentials are fixed. Scope defaults to ANY * * @param creds * @throws HTTPException */ public static void setGlobalCredentials(Credentials creds) throws HTTPException { setGlobalCredentials(creds, null); } /** * It is convenient to be able to directly set the Credentials (not the provider) when those * credentials are fixed. * * @param creds * @param scope where to use it (i.e. on what host) * @throws HTTPException */ public static void setGlobalCredentials(Credentials creds, AuthScope scope) throws HTTPException { assert (creds != null); if (scope == null) scope = AuthScope.ANY; CredentialsProvider provider = new BasicCredentialsProvider(); provider.setCredentials(scope, creds); setGlobalCredentialsProvider(provider, scope); } /* Make this externally accessible primarily for testing */ public static synchronized void setGlobalSSLAuth( String keypath, String keypassword, String trustpath, String trustpassword) { // load the stores if defined try { if (trustpath != null && trustpassword != null) { truststore = KeyStore.getInstance(KeyStore.getDefaultType()); try (FileInputStream instream = new FileInputStream(new File(trustpath))) { truststore.load(instream, trustpassword.toCharArray()); } } else truststore = null; if (keypath != null && keypassword != null) { keystore = KeyStore.getInstance(KeyStore.getDefaultType()); try (FileInputStream instream = new FileInputStream(new File(keypath))) { keystore.load(instream, keypassword.toCharArray()); } } else keystore = null; } catch (IOException | NoSuchAlgorithmException | CertificateException | KeyStoreException ex) { log.error("Illegal -D keystore parameters: " + ex.getMessage()); truststore = null; keystore = null; } try { // set up the context SSLContext scxt = null; if (IGNORECERTS) { scxt = SSLContext.getInstance("TLS"); TrustManager[] trust_mgr = new TrustManager[] { new X509TrustManager() { public X509Certificate[] getAcceptedIssuers() { return null; } public void checkClientTrusted(X509Certificate[] certs, String t) {} public void checkServerTrusted(X509Certificate[] certs, String t) {} } }; scxt.init( null, // key manager trust_mgr, // trust manager new SecureRandom()); // random number generator } else { SSLContextBuilder sslbuilder = SSLContexts.custom(); TrustStrategy strat = new LooseTrustStrategy(); if (truststore != null) sslbuilder.loadTrustMaterial(truststore, strat); else sslbuilder.loadTrustMaterial(strat); sslbuilder.loadTrustMaterial(truststore, new LooseTrustStrategy()); if (keystore != null) sslbuilder.loadKeyMaterial(keystore, keypassword.toCharArray()); scxt = sslbuilder.build(); } globalsslfactory = new SSLConnectionSocketFactory(scxt, new NoopHostnameVerifier()); RegistryBuilder rb = RegistryBuilder.<ConnectionSocketFactory>create(); rb.register("https", globalsslfactory); sslregistry = rb.build(); } catch (KeyStoreException | NoSuchAlgorithmException | KeyManagementException | 
UnrecoverableEntryException e) { log.error("Failed to set key/trust store(s): " + e.getMessage()); sslregistry = null; globalsslfactory = null; } } public static synchronized void setGlobalProxy(String proxyurl) { if (proxyurl == null) throw new IllegalArgumentException("Bad proxy URL: " + proxyurl); URI uri; try { uri = HTTPUtil.parseToURI(proxyurl); } catch (URISyntaxException e) { throw new IllegalArgumentException("Bad proxy URL: " + proxyurl); } if (uri.getScheme().equals("http")) httpproxy = new HttpHost(uri.getHost(), uri.getPort(), "http"); else if (uri.getScheme().equals("https")) httpsproxy = new HttpHost(uri.getHost(), uri.getPort(), "https"); String upw = uri.getUserInfo(); if (upw != null) { String[] pieces = upw.split("[:]"); if (pieces.length != 2 || HTTPUtil.nullify(pieces[0]) == null || HTTPUtil.nullify(pieces[1]) == null) throw new IllegalArgumentException("Bad userinfo: " + proxyurl); proxyuser = pieces[0]; proxypwd = pieces[1]; } } ////////////////////////////////////////////////// // Instance variables // Currently, the granularity of authorization is host+port. protected String sessionURI = null; // This is either a real url // or one constructed from an AuthScope protected URI scopeURI = null; // constructed protected AuthScope scope = null; protected boolean closed = false; // Since can't access CredentialsProvider map, mimic protected Map<AuthScope, CredentialsProvider> localcreds = new HashMap<>(); protected List<ucar.httpservices.HTTPMethod> methodList = new Vector<HTTPMethod>(); protected String identifier = "Session"; protected Settings localsettings = new Settings(); // We currently only allow the use of global interceptors // protected List<Object> intercepts = new ArrayList<Object>(); // current set of interceptors; // This context is re-used over all method executions so that we maintain // cookies, credentials, etc. // But we do need away to clear so that e.g. we can clear credentials cache protected HttpClientContext sessioncontext = HttpClientContext.create(); // cached and recreated as needed protected boolean cachevalid = false; // Are cached items up-to-date? 
protected CloseableHttpClient cachedclient = null; protected RequestConfig cachedconfig = null; protected URI requestURI = null; // full uri from the HTTPMethod call protected ExecState execution = new ExecState(); ////////////////////////////////////////////////// // Constructor(s) // All are package level so that only HTTPFactory can be used externally protected HTTPSession() throws HTTPException {} HTTPSession(String host, int port) throws HTTPException { init(new AuthScope(host, port, null, null), null); } HTTPSession(String uri) throws HTTPException { init(HTTPAuthUtil.uriToAuthScope(uri), uri); } HTTPSession(HttpHost httphost) throws HTTPException { init(HTTPAuthUtil.hostToAuthScope(httphost), null); } protected void init(AuthScope scope, String actualurl) throws HTTPException { assert (scope != null); if (actualurl != null) this.sessionURI = actualurl; else this.sessionURI = HTTPAuthUtil.authscopeToURI(scope).toString(); this.scope = scope; this.scopeURI = HTTPAuthUtil.authscopeToURI(scope); this.cachevalid = false; // Force build on first use this.sessioncontext.setCookieStore(new BasicCookieStore()); this.sessioncontext.setAttribute(HttpClientContext.AUTH_CACHE, new BasicAuthCache()); } ////////////////////////////////////////////////// // Interceptors: Only supported at global level protected static void setInterceptors(HttpClientBuilder cb) { for (HttpRequestInterceptor hrq : reqintercepts) { cb.addInterceptorLast(hrq); } for (HttpResponseInterceptor hrs : rspintercepts) { cb.addInterceptorLast(hrs); } // Add debug interceptors for (HttpRequestInterceptor hrq : dbgreq) { cb.addInterceptorFirst(hrq); } for (HttpResponseInterceptor hrs : dbgrsp) { cb.addInterceptorFirst(hrs); } // Hack: add Content-Encoding suppressor cb.addInterceptorFirst(CEKILL); } ////////////////////////////////////////////////// // Accessor(s) public AuthScope getAuthScope() { return this.scope; } public String getSessionURI() { return this.sessionURI; } /** * Extract the sessionid cookie value * * @return sessionid string */ public String getSessionID() { String sid = null; String jsid = null; List<Cookie> cookies = this.sessioncontext.getCookieStore().getCookies(); for (Cookie cookie : cookies) { if (cookie.getName().equalsIgnoreCase("sessionid")) sid = cookie.getValue(); if (cookie.getName().equalsIgnoreCase("jsessionid")) jsid = cookie.getValue(); } return (sid == null ? jsid : sid); } public HTTPSession setUserAgent(String agent) { if (agent == null || agent.length() == 0) throw new IllegalArgumentException("null argument"); localsettings.setParameter(Prop.USER_AGENT, agent); this.cachevalid = false; return this; } public HTTPSession setSoTimeout(int timeout) { if (timeout <= 0) throw new IllegalArgumentException("setSoTimeout"); localsettings.setParameter(Prop.SO_TIMEOUT, timeout); this.cachevalid = false; return this; } public HTTPSession setConnectionTimeout(int timeout) { if (timeout <= 0) throw new IllegalArgumentException("setConnectionTImeout"); localsettings.setParameter(Prop.CONN_TIMEOUT, timeout); localsettings.setParameter(Prop.CONN_REQ_TIMEOUT, timeout); this.cachevalid = false; return this; } /** * Set the max number of redirects to follow * * @param n */ public HTTPSession setMaxRedirects(int n) { if (n < 0) // validate throw new IllegalArgumentException("setMaxRedirects"); localsettings.setParameter(Prop.MAX_REDIRECTS, n); this.cachevalid = false; return this; } /** Enable/disable redirection following Default is yes. 
*/ public HTTPSession setFollowRedirects(boolean tf) { localsettings.setParameter(Prop.HANDLE_REDIRECTS, (Boolean) tf); this.cachevalid = false; return this; } /** * Should we use sessionid's? * * @param tf */ public HTTPSession setUseSessions(boolean tf) { localsettings.setParameter(Prop.USESESSIONS, (Boolean) tf); this.cachevalid = false; return this; } public List<Cookie> getCookies() { if (this.sessioncontext == null) return null; List<Cookie> cookies = this.sessioncontext.getCookieStore().getCookies(); return cookies; } public HTTPSession clearCookies() { BasicCookieStore cookies = (BasicCookieStore) this.sessioncontext.getCookieStore(); if (cookies != null) cookies.clear(); return this; } public HTTPSession clearCredentialsCache() { BasicAuthCache ac = (BasicAuthCache) this.sessioncontext.getAttribute(HttpClientContext.AUTH_CACHE); if (ac != null) ac.clear(); return this; } // make package specific HttpClient getClient() { return this.cachedclient; } HttpClientContext getExecutionContext() { return this.sessioncontext; } public Object getSetting(String key) { return localsettings.get(key); } ////////////////////////////////////////////////// /** Close the session. This implies closing any open methods. */ public synchronized void close() { if (this.closed) return; // multiple calls ok while (methodList.size() > 0) { HTTPMethod m = methodList.get(0); m.close(); // forcibly close; will invoke removemethod(). } closed = true; } synchronized HTTPSession addMethod(HTTPMethod m) { if (!methodList.contains(m)) methodList.add(m); return this; } synchronized HTTPSession removeMethod(HTTPMethod m) { methodList.remove(m); return this; } ////////////////////////////////////////////////// // Authorization // per-session versions of the global accessors /** * @param provider * @throws HTTPException */ public HTTPSession setCredentialsProvider(CredentialsProvider provider) throws HTTPException { setCredentialsProvider(provider, null); return this; } /** * This is the most general case * * @param provider the credentials provider * @param scope where to use it (i.e. on what host+port) * @throws HTTPException */ public HTTPSession setCredentialsProvider(CredentialsProvider provider, AuthScope scope) throws HTTPException { mapcreds(provider, scope, localcreds); return this; } /** * It is convenient to be able to directly set the Credentials (not the provider) when those * credentials are fixed. Scope defaults to ANY * * @param creds * @throws HTTPException */ public HTTPSession setCredentials(Credentials creds) throws HTTPException { setCredentials(creds, null); return this; } /** * It is convenient to be able to directly set the Credentials (not the provider) when those * credentials are fixed. * * @param creds * @param scope where to use it (i.e. on what host) * @throws HTTPException */ public HTTPSession setCredentials(Credentials creds, AuthScope scope) throws HTTPException { assert (creds != null); if (scope == null) scope = AuthScope.ANY; CredentialsProvider provider = new BasicCredentialsProvider(); provider.setCredentials(scope, creds); setCredentialsProvider(provider, scope); return this; } ////////////////////////////////////////////////// // Execution (do an actual execution) // Package visible /** * Called primarily from HTTPMethod to do the bulk of the execution. Assumes HTTPMethod has * inserted its headers into request. 
* * @param method * @param methoduri * @param rb * @return Request+Response pair * @throws HTTPException */ ExecState execute(HTTPMethod method, URI methoduri, RequestBuilder rb) throws HTTPException { this.execution = new ExecState(); this.requestURI = methoduri; AuthScope methodscope = HTTPAuthUtil.uriToAuthScope(methoduri); AuthScope target = HTTPAuthUtil.authscopeUpgrade(this.scope, methodscope); synchronized (this) { // keep coverity happy // Merge Settings; Settings merged = HTTPUtil.merge(globalsettings, localsettings); if (!this.cachevalid) { RequestConfig.Builder rcb = RequestConfig.custom(); this.cachedconfig = configureRequest(rcb, merged); HttpClientBuilder cb = HttpClients.custom(); configClient(cb, merged); setAuthenticationAndProxy(cb); this.cachedclient = cb.build(); rb.setConfig(this.cachedconfig); this.cachevalid = true; } } this.execution.request = (HttpRequestBase) rb.build(); try { HttpHost targethost = HTTPAuthUtil.authscopeToHost(target); this.execution.response = cachedclient.execute(targethost, this.execution.request, this.sessioncontext); } catch (IOException ioe) { throw new HTTPException(ioe); } return this.execution; } protected RequestConfig configureRequest(RequestConfig.Builder rcb, Settings settings) throws HTTPException { // Configure the RequestConfig for (Prop key : settings.getKeys()) { Object value = settings.getParameter(key); boolean tf = (value instanceof Boolean ? (Boolean) value : false); if (key == Prop.ALLOW_CIRCULAR_REDIRECTS) { rcb.setCircularRedirectsAllowed(tf); } else if (key == Prop.HANDLE_REDIRECTS) { rcb.setRedirectsEnabled(tf); rcb.setRelativeRedirectsAllowed(tf); } else if (key == Prop.MAX_REDIRECTS) { rcb.setMaxRedirects((Integer) value); } else if (key == Prop.SO_TIMEOUT) { rcb.setSocketTimeout((Integer) value); } else if (key == Prop.CONN_TIMEOUT) { rcb.setConnectTimeout((Integer) value); } else if (key == Prop.CONN_REQ_TIMEOUT) { rcb.setConnectionRequestTimeout((Integer) value); } else if (key == Prop.MAX_THREADS) { connmgr.setMaxTotal((Integer) value); connmgr.setDefaultMaxPerRoute((Integer) value); } /* else ignore */ } RequestConfig cfg = rcb.build(); return cfg; } protected void configClient(HttpClientBuilder cb, Settings settings) throws HTTPException { cb.useSystemProperties(); String agent = (String) settings.get(Prop.USER_AGENT); if (agent != null) cb.setUserAgent(agent); setInterceptors(cb); cb.setContentDecoderRegistry(contentDecoderMap); } /** * Handle authentication and Proxy'ing * * @param cb * @throws HTTPException */ protected synchronized void setAuthenticationAndProxy(HttpClientBuilder cb) throws HTTPException { // First, setup the ssl factory cb.setSSLSocketFactory(globalsslfactory); // Second, Construct a CredentialsProvider that is // the union of the Proxy credentials plus // either the global local credentials; local overrides global // Unfortunately, we cannot either clone or extract the contents // of the client supplied provider, so we are forced (for now) // to modify the client supplied provider. 
// Look in the local authcreds for best scope match AuthScope bestMatch = HTTPAuthUtil.bestmatch(scope, localcreds.keySet()); CredentialsProvider cp = null; if (bestMatch != null) { cp = localcreds.get(bestMatch); } else { bestMatch = HTTPAuthUtil.bestmatch(scope, globalcreds.keySet()); if (bestMatch != null) cp = globalcreds.get(bestMatch); } // Build the proxy credentials and AuthScope Credentials proxycreds = null; AuthScope proxyscope = null; if (proxyuser != null && (httpproxy != null || httpsproxy != null)) { if (httpproxy != null) proxyscope = HTTPAuthUtil.hostToAuthScope(httpproxy); else // httpsproxy != null proxyscope = HTTPAuthUtil.hostToAuthScope(httpsproxy); proxycreds = new UsernamePasswordCredentials(proxyuser, proxypwd); } if (cp == null && proxycreds != null && proxyscope != null) { // If client provider is null and proxycreds are not, // then use proxycreds alone cp = new BasicCredentialsProvider(); cp.setCredentials(proxyscope, proxycreds); } else if (cp != null && proxycreds != null && proxyscope != null) { // If client provider is not null and proxycreds are not, // then add proxycreds to the client provider cp.setCredentials(proxyscope, proxycreds); } if (cp != null) this.sessioncontext.setCredentialsProvider(cp); } ////////////////////////////////////////////////// // Utilities Static and Per-Instance static String getCanonicalURL(String legalurl) { if (legalurl == null) return null; int index = legalurl.indexOf('?'); if (index >= 0) legalurl = legalurl.substring(0, index); // remove any trailing extension // index = legalurl.lastIndexOf('.'); // if(index >= 0) legalurl = legalurl.substring(0,index); return HTTPUtil.canonicalpath(legalurl); } static String getUrlAsString(String url) throws HTTPException { try (HTTPMethod m = HTTPFactory.Get(url); ) { int status = m.execute(); String content = null; if (status == 200) { content = m.getResponseAsString(); } return content; } } static int putUrlAsString(String content, String url) throws HTTPException { int status = 0; try { try (HTTPMethod m = HTTPFactory.Put(url)) { m.setRequestContent( new StringEntity(content, ContentType.create("application/text", "UTF-8"))); status = m.execute(); } } catch (UnsupportedCharsetException uce) { throw new HTTPException(uce); } return status; } static String getstorepath(String prefix) { String path = System.getProperty(prefix + "store"); if (path != null) { path = path.trim(); if (path.length() == 0) path = null; } return path; } static String getpassword(String prefix) { String password = System.getProperty(prefix + "storepassword"); if (password != null) { password = password.trim(); if (password.length() == 0) password = null; } return password; } static String cleanproperty(String property) { String value = System.getProperty(property); if (value != null) { value = value.trim(); if (value.length() == 0) value = null; } return value; } static void mapcreds( CredentialsProvider provider, AuthScope scope, Map<AuthScope, CredentialsProvider> authcreds) { assert (provider != null); if (scope == null) scope = AuthScope.ANY; authcreds.put(scope, provider); } ////////////////////////////////////////////////// // Testing support // Expose the state for testing purposes public synchronized boolean isClosed() { return this.closed; } public synchronized int getMethodcount() { return methodList.size(); } public RequestConfig getDebugConfig() { return (this.cachevalid ? 
this.cachedconfig : null); } public Header[] getRequestHeaders() { if (!this.cachevalid) return null; Header[] hdrs = null; if (this.execution.request != null) hdrs = this.execution.request.getAllHeaders(); return hdrs; } ////////////////////////////////////////////////// // Debug interface // Provide a way to kill everything at the end of a Test // When testing, we need to be able to clean up // all existing sessions because JUnit can run all // test within a single jvm. static List<HTTPSession> sessionList = null; // List of all HTTPSession instances // only used when testing flag is set public static boolean TESTING = false; // set to true during testing, should be false otherwise protected static synchronized void kill() { if (sessionList != null) { for (HTTPSession session : sessionList) { session.close(); } sessionList.clear(); // Rebuild the connection manager connmgr.shutdown(); connmgr = new PoolingHttpClientConnectionManager(sslregistry); setGlobalThreadCount(DFALTTHREADCOUNT); } } // If we are testing, then track the sessions for kill protected static synchronized void track(HTTPSession session) { if (sessionList == null) sessionList = new ArrayList<HTTPSession>(); sessionList.add(session); } public static synchronized void debugHeaders(boolean print) { HTTPUtil.InterceptRequest rq = new HTTPUtil.InterceptRequest(); HTTPUtil.InterceptResponse rs = new HTTPUtil.InterceptResponse(); rq.setPrint(print); rs.setPrint(print); /* remove any previous */ for (int i = reqintercepts.size() - 1; i >= 0; i--) { HttpRequestInterceptor hr = reqintercepts.get(i); if (hr instanceof HTTPUtil.InterceptCommon) reqintercepts.remove(i); } for (int i = rspintercepts.size() - 1; i >= 0; i--) { HttpResponseInterceptor hr = rspintercepts.get(i); if (hr instanceof HTTPUtil.InterceptCommon) rspintercepts.remove(i); } reqintercepts.add(rq); rspintercepts.add(rs); } public static void debugReset() { for (HttpRequestInterceptor hri : reqintercepts) { if (hri instanceof HTTPUtil.InterceptCommon) ((HTTPUtil.InterceptCommon) hri).clear(); } } public static HTTPUtil.InterceptRequest debugRequestInterceptor() { for (HttpRequestInterceptor hri : reqintercepts) { if (hri instanceof HTTPUtil.InterceptRequest) return ((HTTPUtil.InterceptRequest) hri); } return null; } public static HTTPUtil.InterceptResponse debugResponseInterceptor() { for (HttpResponseInterceptor hri : rspintercepts) { if (hri instanceof HTTPUtil.InterceptResponse) return ((HTTPUtil.InterceptResponse) hri); } return null; } ////////////////////////////////////////////////// // Deprecated, but here for back compatibility @Deprecated public static void setGlobalCredentialsProvider(AuthScope scope, CredentialsProvider provider) throws HTTPException { setGlobalCredentialsProvider(provider, scope); } @Deprecated public static void setGlobalCredentialsProvider(String url, CredentialsProvider provider) throws HTTPException { assert (url != null && provider != null); AuthScope scope = HTTPAuthUtil.uriToAuthScope(url); setGlobalCredentialsProvider(provider, scope); } @Deprecated public static void setGlobalCredentials(String url, Credentials creds) throws HTTPException { assert (url != null && creds != null); AuthScope scope = HTTPAuthUtil.uriToAuthScope(url); CredentialsProvider provider = new BasicCredentialsProvider(); provider.setCredentials(scope, creds); setGlobalCredentialsProvider(provider, scope); } @Deprecated public void setCredentials(String url, Credentials creds) throws HTTPException { assert (creds != null); AuthScope scope = 
HTTPAuthUtil.uriToAuthScope(url); setCredentials(creds, scope); } @Deprecated public void setCredentialsProvider(String url, CredentialsProvider provider) throws HTTPException { assert (url != null && provider != null); AuthScope scope = HTTPAuthUtil.uriToAuthScope(url); setCredentialsProvider(provider, scope); } @Deprecated public void setCredentialsProvider(AuthScope scope, CredentialsProvider provider) throws HTTPException { setCredentialsProvider(provider, scope); } @Deprecated public static int getRetryCount() { throw new UnsupportedOperationException(); } @Deprecated public static void setGlobalCompression() { setGlobalCompression("gzip,deflate"); } @Deprecated public static void setGlobalProxy(String host, int port) { try { URL u = new URL("http", host, port, null); setGlobalProxy(u.toString()); } catch (MalformedURLException e) { throw new IllegalArgumentException(e); } } @Deprecated public void setProxy(String host, int port) { setGlobalProxy(host, port); } @Deprecated public static void setGlobalCredentialsProvider(CredentialsProvider provider, String scheme) throws HTTPException { setGlobalCredentialsProvider(provider); } @Deprecated public static void setRetryCount(int count) { throw new UnsupportedOperationException(); } @Deprecated public void clearState() { // no-op } @Deprecated public String getSessionURL() { return getSessionURI(); } }
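For completeness, here is a minimal sketch of the "no explicit session" path described in the class comment, modeled directly on getUrlAsString() above: HTTPFactory.Get creates a session behind the scenes, and try-with-resources closes the method (and with it the implicit session). The URL is hypothetical, and the sketch assumes HTTPFactory lives alongside HTTPMethod in the ucar.httpservices package.

// Minimal sketch of the implicit-session usage pattern; the URL is hypothetical.
import ucar.httpservices.HTTPException;
import ucar.httpservices.HTTPFactory;
import ucar.httpservices.HTTPMethod;

public class ImplicitSessionSketch {
  public static void main(String[] args) throws HTTPException {
    String url = "https://thredds.example.org/thredds/catalog.xml"; // hypothetical URL
    // No HTTPSession is passed, so one is created (and destroyed) along with the method;
    // try-with-resources closes the method, which also tears down the implicit session.
    try (HTTPMethod m = HTTPFactory.Get(url)) {
      int status = m.execute(); // delegates to HTTPSession.execute() under the covers
      if (status == 200) {
        System.out.println(m.getResponseAsString());
      } else {
        System.out.println("HTTP status " + status);
      }
    }
  }
}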
/** * This class reads a NEXRAD level II data file. It can handle NCDC archives (ARCHIVE2), as well as * CRAFT/IDD compressed files (AR2V0001). * * <p>Adapted with permission from the Java Iras software developed by David Priegnitz at NSSL. * * <p> * * <p>Documentation on Archive Level II data format can be found at: <a * href="http://www.ncdc.noaa.gov/oa/radar/leveliidoc.html"> * http://www.ncdc.noaa.gov/oa/radar/leveliidoc.html</a> * * @author caron * @author David Priegnitz */ public class Level2VolumeScan { // data formats public static final String ARCHIVE2 = "ARCHIVE2"; public static final String AR2V0001 = "AR2V0001"; public static final String AR2V0002 = "AR2V0002"; public static final String AR2V0003 = "AR2V0003"; public static final String AR2V0004 = "AR2V0004"; public static final String AR2V0006 = "AR2V0006"; private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(Level2VolumeScan.class); //////////////////////////////////////////////////////////////////////////////////// // Data file RandomAccessFile raf; private String dataFormat = null; // ARCHIVE2 or AR2V0001 private int title_julianDay; // days since 1/1/70 private int title_msecs; // milliseconds since midnight private String stationId; // 4 letter station assigned by ICAO private NexradStationDB.Station station; // from lookup table, may be null private Level2Record first, last; private int vcp = 0; // Volume coverage pattern private int max_radials = 0; private int min_radials = Integer.MAX_VALUE; private int max_radials_hr = 0; private int min_radials_hr = Integer.MAX_VALUE; private int dopplarResolution; private boolean hasDifferentDopplarResolutions; private boolean hasHighResolutionData; private boolean hasHighResolutionREF; private boolean hasHighResolutionVEL; private boolean hasHighResolutionSW; private boolean hasHighResolutionZDR; private boolean hasHighResolutionPHI; private boolean hasHighResolutionRHO; // List of List of Level2Record private List<List<Level2Record>> reflectivityGroups, dopplerGroups; // private ArrayList reflectivityGroups, dopplerGroups; private List<List<Level2Record>> reflectivityHighResGroups; private List<List<Level2Record>> velocityHighResGroups; private List<List<Level2Record>> spectrumHighResGroups; private ArrayList diffReflectHighResGroups; private ArrayList diffPhaseHighResGroups; private ArrayList coefficientHighResGroups; private boolean showMessages = false, showData = false, debugScans = false, debugGroups2 = false, debugRadials = false, debugStats = false; private boolean runCheck = false; Level2VolumeScan(RandomAccessFile orgRaf, CancelTask cancelTask) throws IOException { this.raf = orgRaf; if (log.isDebugEnabled()) log.debug("Level2VolumeScan on " + raf.getLocation()); raf.seek(0); raf.order(RandomAccessFile.BIG_ENDIAN); // volume scan header dataFormat = raf.readString(8); raf.skipBytes(1); String volumeNo = raf.readString(3); title_julianDay = raf.readInt(); // since 1/1/70 title_msecs = raf.readInt(); stationId = raf.readString(4).trim(); // only in AR2V0001 if (log.isDebugEnabled()) log.debug(" dataFormat= " + dataFormat + " stationId= " + stationId); if (stationId.length() == 0) { // try to get it from the filename LOOK stationId = null; } // try to find the station if (stationId != null) { if (!stationId.startsWith("K") && stationId.length() == 4) { String _stationId = "K" + stationId; station = NexradStationDB.get(_stationId); } else station = NexradStationDB.get(stationId); } // see if we have to uncompress if (dataFormat.equals(AR2V0001) || 
dataFormat.equals(AR2V0003) || dataFormat.equals(AR2V0004) || dataFormat.equals(AR2V0006)) { raf.skipBytes(4); String BZ = raf.readString(2); if (BZ.equals("BZ")) { RandomAccessFile uraf; File uncompressedFile = DiskCache.getFileStandardPolicy(raf.getLocation() + ".uncompress"); if (uncompressedFile.exists() && uncompressedFile.length() > 0) { // see if its locked - another thread is writing it FileInputStream fstream = null; FileLock lock = null; try { fstream = new FileInputStream(uncompressedFile); // lock = fstream.getChannel().lock(0, 1, true); // wait till its unlocked while (true) { // loop waiting for the lock try { lock = fstream.getChannel().lock(0, 1, true); // wait till its unlocked break; } catch (OverlappingFileLockException oe) { // not sure why lock() doesnt block try { Thread.sleep(100); // msecs } catch (InterruptedException e1) { break; } } } } finally { if (lock != null) lock.release(); if (fstream != null) fstream.close(); } uraf = new ucar.unidata.io.RandomAccessFile(uncompressedFile.getPath(), "r"); } else { // nope, gotta uncompress it uraf = uncompress(raf, uncompressedFile.getPath()); if (log.isDebugEnabled()) log.debug("made uncompressed file= " + uncompressedFile.getPath()); } // switch to uncompressed file raf.close(); raf = uraf; raf.order(RandomAccessFile.BIG_ENDIAN); } raf.seek(Level2Record.FILE_HEADER_SIZE); } List<Level2Record> reflectivity = new ArrayList<Level2Record>(); List<Level2Record> doppler = new ArrayList<Level2Record>(); List<Level2Record> highReflectivity = new ArrayList<Level2Record>(); List<Level2Record> highVelocity = new ArrayList<Level2Record>(); List<Level2Record> highSpectrum = new ArrayList<Level2Record>(); List<Level2Record> highDiffReflectivity = new ArrayList<Level2Record>(); List<Level2Record> highDiffPhase = new ArrayList<Level2Record>(); List<Level2Record> highCorreCoefficient = new ArrayList<Level2Record>(); long message_offset31 = 0; int recno = 0; while (true) { Level2Record r = Level2Record.factory(raf, recno++, message_offset31); if (r == null) break; if (showData) r.dump2(System.out); // skip non-data messages if (r.message_type == 31) { message_offset31 = message_offset31 + (r.message_size * 2 + 12 - 2432); } if (r.message_type != 1 && r.message_type != 31) { if (showMessages) r.dumpMessage(System.out); continue; } // if (showData) r.dump2(System.out); /* skip bad if (!r.checkOk()) { r.dump(System.out); continue; } */ // some global params if (vcp == 0) vcp = r.vcp; if (first == null) first = r; last = r; if (runCheck && !r.checkOk()) { continue; } if (r.hasReflectData) reflectivity.add(r); if (r.hasDopplerData) doppler.add(r); if (r.message_type == 31) { if (r.hasHighResREFData) highReflectivity.add(r); if (r.hasHighResVELData) highVelocity.add(r); if (r.hasHighResSWData) highSpectrum.add(r); if (r.hasHighResZDRData) highDiffReflectivity.add(r); if (r.hasHighResPHIData) highDiffPhase.add(r); if (r.hasHighResRHOData) highCorreCoefficient.add(r); } if ((cancelTask != null) && cancelTask.isCancel()) return; } if (debugRadials) System.out.println(" reflect ok= " + reflectivity.size() + " doppler ok= " + doppler.size()); if (highReflectivity.size() == 0) { reflectivityGroups = sortScans("reflect", reflectivity, 600); dopplerGroups = sortScans("doppler", doppler, 600); } if (highReflectivity.size() > 0) reflectivityHighResGroups = sortScans("reflect_HR", highReflectivity, 720); if (highVelocity.size() > 0) velocityHighResGroups = sortScans("velocity_HR", highVelocity, 720); if (highSpectrum.size() > 0) spectrumHighResGroups = 
sortScans("spectrum_HR", highSpectrum, 720); if (highDiffReflectivity.size() > 0) diffReflectHighResGroups = sortScans("diffReflect_HR", highDiffReflectivity, 720); if (highDiffPhase.size() > 0) diffPhaseHighResGroups = sortScans("diffPhase_HR", highDiffPhase, 720); if (highCorreCoefficient.size() > 0) coefficientHighResGroups = sortScans("coefficient_HR", highCorreCoefficient, 720); } private ArrayList sortScans(String name, List<Level2Record> scans, int siz) { // now group by elevation_num Map<Short, List<Level2Record>> groupHash = new HashMap<Short, List<Level2Record>>(siz); for (Level2Record record : scans) { List<Level2Record> group = groupHash.get(record.elevation_num); if (null == group) { group = new ArrayList<Level2Record>(); groupHash.put(record.elevation_num, group); } group.add(record); } // sort the groups by elevation_num ArrayList groups = new ArrayList(groupHash.values()); Collections.sort(groups, new GroupComparator()); // use the maximum radials for (int i = 0; i < groups.size(); i++) { ArrayList group = (ArrayList) groups.get(i); Level2Record r = (Level2Record) group.get(0); if (runCheck) testScan(name, group); if (r.getGateCount(REFLECTIVITY_HIGH) > 500 || r.getGateCount(VELOCITY_HIGH) > 1000) { max_radials_hr = Math.max(max_radials_hr, group.size()); min_radials_hr = Math.min(min_radials_hr, group.size()); } else { max_radials = Math.max(max_radials, group.size()); min_radials = Math.min(min_radials, group.size()); } } if (debugRadials) { System.out.println(name + " min_radials= " + min_radials + " max_radials= " + max_radials); for (int i = 0; i < groups.size(); i++) { ArrayList group = (ArrayList) groups.get(i); Level2Record lastr = (Level2Record) group.get(0); for (int j = 1; j < group.size(); j++) { Level2Record r = (Level2Record) group.get(j); if (r.data_msecs < lastr.data_msecs) System.out.println(" out of order " + j); lastr = r; } } } testVariable(name, groups); if (debugScans) System.out.println("-----------------------------"); return groups; } public int getMaxRadials(int r) { if (r == 0) return max_radials; else if (r == 1) return max_radials_hr; else return 0; } public int getMinRadials(int r) { if (r == 0) return min_radials; else if (r == 1) return min_radials_hr; else return 0; } public int getDopplarResolution() { return dopplarResolution; } public boolean hasDifferentDopplarResolutions() { return hasDifferentDopplarResolutions; } public boolean hasHighResolutions(int dt) { if (dt == 0) return hasHighResolutionData; else if (dt == 1) return hasHighResolutionREF; else if (dt == 2) return hasHighResolutionVEL; else if (dt == 3) return hasHighResolutionSW; else if (dt == 4) return hasHighResolutionZDR; else if (dt == 5) return hasHighResolutionPHI; else if (dt == 6) return hasHighResolutionRHO; else return false; } // do we have same characteristics for all records in a scan? private int MAX_RADIAL = 721; private int[] radial = new int[MAX_RADIAL]; private boolean testScan(String name, ArrayList group) { int datatype = name.equals("reflect") ? 
Level2Record.REFLECTIVITY : Level2Record.VELOCITY_HI; Level2Record first = (Level2Record) group.get(0); int n = group.size(); if (debugScans) { boolean hasBoth = first.hasDopplerData && first.hasReflectData; System.out.println( name + " " + first + " has " + n + " radials resolution= " + first.resolution + " has both = " + hasBoth); } boolean ok = true; double sum = 0.0; double sum2 = 0.0; for (int i = 0; i < MAX_RADIAL; i++) radial[i] = 0; for (int i = 0; i < group.size(); i++) { Level2Record r = (Level2Record) group.get(i); /* this appears to be common - seems to be ok, we put missing values in if (r.getGateCount(datatype) != first.getGateCount(datatype)) { log.error(raf.getLocation()+" different number of gates ("+r.getGateCount(datatype)+ "!="+first.getGateCount(datatype)+") in record "+name+ " "+r); ok = false; } */ if (r.getGateSize(datatype) != first.getGateSize(datatype)) { log.warn( raf.getLocation() + " different gate size (" + r.getGateSize(datatype) + ") in record " + name + " " + r); ok = false; } if (r.getGateStart(datatype) != first.getGateStart(datatype)) { log.warn( raf.getLocation() + " different gate start (" + r.getGateStart(datatype) + ") in record " + name + " " + r); ok = false; } if (r.resolution != first.resolution) { log.warn( raf.getLocation() + " different resolution (" + r.resolution + ") in record " + name + " " + r); ok = false; } if ((r.radial_num < 0) || (r.radial_num >= MAX_RADIAL)) { log.info( raf.getLocation() + " radial out of range= " + r.radial_num + " in record " + name + " " + r); continue; } if (radial[r.radial_num] > 0) { log.warn( raf.getLocation() + " duplicate radial = " + r.radial_num + " in record " + name + " " + r); ok = false; } radial[r.radial_num] = r.recno + 1; sum += r.getElevation(); sum2 += r.getElevation() * r.getElevation(); // System.out.println(" elev="+r.getElevation()+" azi="+r.getAzimuth()); } for (int i = 1; i < radial.length; i++) { if (0 == radial[i]) { if (n != (i - 1)) { log.warn(" missing radial(s)"); ok = false; } break; } } double avg = sum / n; double sd = Math.sqrt((n * sum2 - sum * sum) / (n * (n - 1))); // System.out.println(" avg elev="+avg+" std.dev="+sd); return ok; } // do we have same characteristics for all groups in a variable? private boolean testVariable(String name, List scans) { int datatype = name.equals("reflect") ? Level2Record.REFLECTIVITY : Level2Record.VELOCITY_HI; if (scans.size() == 0) { log.warn(" No data for = " + name); return false; } boolean ok = true; List firstScan = (List) scans.get(0); Level2Record firstRecord = (Level2Record) firstScan.get(0); dopplarResolution = firstRecord.resolution; if (debugGroups2) System.out.println( "Group " + Level2Record.getDatatypeName(datatype) + " ngates = " + firstRecord.getGateCount(datatype) + " start = " + firstRecord.getGateStart(datatype) + " size = " + firstRecord.getGateSize(datatype)); for (int i = 1; i < scans.size(); i++) { List scan = (List) scans.get(i); Level2Record record = (Level2Record) scan.get(0); if ((datatype == Level2Record.VELOCITY_HI) && (record.resolution != firstRecord.resolution)) { // do all velocity resolutions match ?? 
  // do we have same characteristics for all groups in a variable?
  private boolean testVariable(String name, List scans) {
    int datatype = name.equals("reflect") ? Level2Record.REFLECTIVITY : Level2Record.VELOCITY_HI;
    if (scans.size() == 0) {
      log.warn(" No data for = " + name);
      return false;
    }

    boolean ok = true;
    List firstScan = (List) scans.get(0);
    Level2Record firstRecord = (Level2Record) firstScan.get(0);
    dopplarResolution = firstRecord.resolution;

    if (debugGroups2)
      System.out.println("Group " + Level2Record.getDatatypeName(datatype) + " ngates = "
          + firstRecord.getGateCount(datatype) + " start = " + firstRecord.getGateStart(datatype)
          + " size = " + firstRecord.getGateSize(datatype));

    for (int i = 1; i < scans.size(); i++) {
      List scan = (List) scans.get(i);
      Level2Record record = (Level2Record) scan.get(0);

      if ((datatype == Level2Record.VELOCITY_HI)
          && (record.resolution != firstRecord.resolution)) { // do all velocity resolutions match ??
        log.warn(name + " scan " + i + " diff resolutions = " + record.resolution + ", "
            + firstRecord.resolution + " elev= " + record.elevation_num + " " + record.getElevation());
        ok = false;
        hasDifferentDopplarResolutions = true;
      }

      if (record.getGateSize(datatype) != firstRecord.getGateSize(datatype)) {
        log.warn(name + " scan " + i + " diff gates size = " + record.getGateSize(datatype) + " "
            + firstRecord.getGateSize(datatype) + " elev= " + record.elevation_num + " "
            + record.getElevation());
        ok = false;
      } else if (debugGroups2)
        System.out.println(" ok gates size elev= " + record.elevation_num + " " + record.getElevation());

      if (record.getGateStart(datatype) != firstRecord.getGateStart(datatype)) {
        log.warn(name + " scan " + i + " diff gates start = " + record.getGateStart(datatype) + " "
            + firstRecord.getGateStart(datatype) + " elev= " + record.elevation_num + " "
            + record.getElevation());
        ok = false;
      } else if (debugGroups2)
        System.out.println(" ok gates start elev= " + record.elevation_num + " " + record.getElevation());

      if (record.message_type == 31) {
        hasHighResolutionData = true;
        // each data type
        if (record.hasHighResREFData) hasHighResolutionREF = true;
        if (record.hasHighResVELData) hasHighResolutionVEL = true;
        if (record.hasHighResSWData) hasHighResolutionSW = true;
        if (record.hasHighResZDRData) hasHighResolutionZDR = true;
        if (record.hasHighResPHIData) hasHighResolutionPHI = true;
        if (record.hasHighResRHOData) hasHighResolutionRHO = true;
      }
    }
    return ok;
  }

  /**
   * Get Reflectivity Groups. Groups are all the records for a variable and elevation_num.
   *
   * @return List of type List of type Level2Record
   */
  public List getReflectivityGroups() {
    return reflectivityGroups;
  }

  /**
   * Get Velocity Groups. Groups are all the records for a variable and elevation_num.
   *
   * @return List of type List of type Level2Record
   */
  public List getVelocityGroups() {
    return dopplerGroups;
  }

  public List getHighResVelocityGroups() {
    return velocityHighResGroups;
  }

  public List getHighResReflectivityGroups() {
    return reflectivityHighResGroups;
  }

  public List getHighResSpectrumGroups() {
    return spectrumHighResGroups;
  }

  public List getHighResDiffReflectGroups() {
    return diffReflectHighResGroups;
  }

  public List getHighResDiffPhaseGroups() {
    return diffPhaseHighResGroups;
  }

  public List getHighResCoeffocientGroups() {
    return coefficientHighResGroups;
  }
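  /*
   * Illustrative sketch only (the local variable names are hypothetical): a caller holding a
   * Level2VolumeScan could walk one of the grouped lists returned above roughly like this,
   * keeping in mind that a high-resolution getter may return null when that moment is not
   * present in the file.
   *
   *   List groups = volScan.getHighResReflectivityGroups();
   *   if (groups != null) {
   *     for (Object o : groups) {
   *       List scan = (List) o; // one elevation sweep, sorted by elevation_num
   *       Level2Record first = (Level2Record) scan.get(0);
   *       System.out.println("elevation_num=" + first.elevation_num + " radials=" + scan.size());
   *     }
   *   }
   */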
  private class GroupComparator implements Comparator<List<Level2Record>> {
    public int compare(List<Level2Record> group1, List<Level2Record> group2) {
      Level2Record record1 = group1.get(0);
      Level2Record record2 = group2.get(0);
      // if (record1.elevation_num != record2.elevation_num)
      return record1.elevation_num - record2.elevation_num;
      // return record1.cut - record2.cut;
    }
  }

  /**
   * Get data format (ARCHIVE2, AR2V0001) for this file.
   *
   * @return data format (ARCHIVE2, AR2V0001) for this file.
   */
  public String getDataFormat() {
    return dataFormat;
  }

  /**
   * Get the starting Julian day for this volume.
   *
   * @return days since 1/1/70.
   */
  public int getTitleJulianDays() {
    return title_julianDay;
  }

  /**
   * Get the starting time in milliseconds since midnight.
   *
   * @return Generation time of data in milliseconds of day past midnight (UTC).
   */
  public int getTitleMsecs() {
    return title_msecs;
  }

  /**
   * Get the Volume Coverage Pattern number for this data.
   *
   * @return VCP
   * @see Level2Record#getVolumeCoveragePatternName
   */
  public int getVCP() {
    return vcp;
  }

  /**
   * Get the 4-char station ID for this data.
   *
   * @return station ID (may be null)
   */
  public String getStationId() {
    return stationId;
  }

  public String getStationName() {
    return station == null ? "unknown" : station.name;
  }

  public double getStationLatitude() {
    return station == null ? 0.0 : station.lat;
  }

  public double getStationLongitude() {
    return station == null ? 0.0 : station.lon;
  }

  public double getStationElevation() {
    return station == null ? 0.0 : station.elev;
  }

  public Date getStartDate() {
    return first.getDate();
  }

  public Date getEndDate() {
    return last.getDate();
  }

  /**
   * Write an equivalent uncompressed version of the file.
   *
   * @param inputRaf file to uncompress
   * @param ufilename write to this file
   * @return raf of uncompressed file
   * @throws IOException on read error
   */
  private RandomAccessFile uncompress(RandomAccessFile inputRaf, String ufilename) throws IOException {
    RandomAccessFile outputRaf = new RandomAccessFile(ufilename, "rw");
    FileLock lock = null;

    while (true) { // loop waiting for the lock
      try {
        lock = outputRaf.getRandomAccessFile().getChannel().lock(0, 1, false);
        break;
      } catch (OverlappingFileLockException oe) { // not sure why lock() doesn't block
        try {
          Thread.sleep(100); // msecs
        } catch (InterruptedException e1) {
        }
      }
    }

    try {
      inputRaf.seek(0);
      byte[] header = new byte[Level2Record.FILE_HEADER_SIZE];
      inputRaf.read(header);
      outputRaf.write(header);
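      // The rest of the file is a sequence of bzip2-compressed blocks, each preceded by a 4-byte
      // byte count. A count of -1 means there are no more blocks; any other negative count marks
      // the final block, with the sign flipped.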
      boolean eof = false;
      int numCompBytes;
      byte[] ubuff = new byte[40000];
      byte[] obuff = new byte[40000];

      try {
        CBZip2InputStream cbzip2 = new CBZip2InputStream();
        while (!eof) {
          try {
            numCompBytes = inputRaf.readInt();
            if (numCompBytes == -1) {
              if (log.isDebugEnabled()) log.debug(" done: numCompBytes=-1 ");
              break;
            }
          } catch (EOFException ee) {
            log.warn(" got EOFException ");
            break; // assume this is ok
          }

          if (log.isDebugEnabled()) {
            log.debug("reading compressed bytes " + numCompBytes + " input starts at "
                + inputRaf.getFilePointer() + "; output starts at " + outputRaf.getFilePointer());
          }

          /*
           * For some stupid reason, the last block seems to
           * have the number of bytes negated. So, we just
           * assume that any negative number (other than -1)
           * is the last block and go on our merry little way.
           */
          if (numCompBytes < 0) {
            if (log.isDebugEnabled()) log.debug("last block?" + numCompBytes);
            numCompBytes = -numCompBytes;
            eof = true;
          }

          byte[] buf = new byte[numCompBytes];
          inputRaf.readFully(buf);
          ByteArrayInputStream bis = new ByteArrayInputStream(buf, 2, numCompBytes - 2);
          // CBZip2InputStream cbzip2 = new CBZip2InputStream(bis);
          cbzip2.setStream(bis);

          int total = 0;
          int nread;
          /*
          while ((nread = cbzip2.read(ubuff)) != -1) {
            dout2.write(ubuff, 0, nread);
            total += nread;
          }
          */
          try {
            while ((nread = cbzip2.read(ubuff)) != -1) {
              if (total + nread > obuff.length) {
                byte[] temp = obuff;
                obuff = new byte[temp.length * 2];
                System.arraycopy(temp, 0, obuff, 0, temp.length);
              }
              System.arraycopy(ubuff, 0, obuff, total, nread);
              total += nread;
            }
            if (obuff.length >= 0) outputRaf.write(obuff, 0, total);
          } catch (BZip2ReadException ioe) {
            log.warn("Nexrad2IOSP.uncompress ", ioe);
          }

          float nrecords = (float) (total / 2432.0);
          if (log.isDebugEnabled())
            log.debug(" unpacked " + total + " num bytes " + nrecords + " records; output ends at "
                + outputRaf.getFilePointer());
        }
      } catch (Exception e) {
        if (outputRaf != null) outputRaf.close();
        outputRaf = null;

        // don't leave bad files around
        File ufile = new File(ufilename);
        if (ufile.exists()) {
          if (!ufile.delete())
            log.warn("failed to delete uncompressed file (IOException) " + ufilename);
        }

        if (e instanceof IOException)
          throw (IOException) e;
        else
          throw new RuntimeException(e);
      }

    } finally {
      if (null != outputRaf) outputRaf.flush();
      if (lock != null) lock.release();
    }

    return outputRaf;
  }

  // debugging
  static void bdiff(String filename) throws IOException {
    InputStream in1 = new FileInputStream(filename + ".tmp");
    InputStream in2 = new FileInputStream(filename + ".tmp2");
    int count = 0;
    int bad = 0;
    while (true) {
      int b1 = in1.read();
      int b2 = in2.read();
      if (b1 < 0) break;
      if (b2 < 0) break;
      if (b1 != b2) {
        System.out.println(count + " in1=" + b1 + " in2= " + b2);
        bad++;
        if (bad > 130) break;
      }
      count++;
    }
    System.out.println("total read = " + count);
  }

  // check if compressed file seems ok
  public static long testValid(String ufilename) throws IOException {
    boolean lookForHeader = false;

    // gotta make it
    RandomAccessFile raf = new RandomAccessFile(ufilename, "r");
    raf.order(RandomAccessFile.BIG_ENDIAN);
    raf.seek(0);
    byte[] b = new byte[8];
    raf.read(b);
    String test = new String(b);
    if (test.equals(Level2VolumeScan.ARCHIVE2) || test.equals(Level2VolumeScan.AR2V0001)) {
      System.out.println("--Good header= " + test);
      raf.seek(24);
    } else {
      System.out.println("--No header ");
      lookForHeader = true;
      raf.seek(0);
    }

    boolean eof = false;
    int numCompBytes;

    try {
      while (!eof) {
        if (lookForHeader) {
          raf.read(b);
          test = new String(b);
          if (test.equals(Level2VolumeScan.ARCHIVE2) || test.equals(Level2VolumeScan.AR2V0001)) {
            System.out.println(" found header= " + test);
            raf.skipBytes(16);
            lookForHeader = false;
          } else {
            raf.skipBytes(-8);
          }
        }

        try {
          numCompBytes = raf.readInt();
          if (numCompBytes == -1) {
            System.out.println("\n--done: numCompBytes=-1 ");
            break;
          }
        } catch (EOFException ee) {
          System.out.println("\n--got EOFException ");
          break; // assume this is ok
        }

        System.out.print(" " + numCompBytes + ",");
        if (numCompBytes < 0) {
          System.out.println("\n--last block " + numCompBytes);
          numCompBytes = -numCompBytes;
          if (!lookForHeader) eof = true;
        }

        raf.skipBytes(numCompBytes);
      }
    } catch (EOFException e) {
      e.printStackTrace();
    }

    return raf.getFilePointer();
  }
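  // Test drivers: main2 scans a directory of .ar2v files with testValid() and tries to open each
  // file that passes via NetcdfFile.open(); main reads a single hard-wired test file.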
  /** test */
  public static void main2(String[] args) throws IOException {
    File testDir = new File("C:/data/bad/radar2/");
    File[] files = testDir.listFiles();
    for (int i = 0; i < files.length; i++) {
      File file = files[i];
      if (!file.getPath().endsWith(".ar2v")) continue;
      System.out.println(file.getPath() + " " + file.length());
      long pos = testValid(file.getPath());
      if (pos == file.length()) {
        System.out.println("OK");
        try {
          NetcdfFile.open(file.getPath());
        } catch (Throwable t) {
          System.out.println("ERROR= " + t);
        }
      } else
        System.out.println("NOT pos=" + pos);
      System.out.println();
    }
  }

  public static void main(String args[]) throws IOException {
    NexradStationDB.init();
    RandomAccessFile raf = new RandomAccessFile(
        "/upc/share/testdata/radar/nexrad/level2/Level2_KFTG_20060818_1814.ar2v.uncompress.missingradials", "r");
    // RandomAccessFile raf = new
    //     RandomAccessFile("R:/testdata2/radar/nexrad/level2/problem/KCCX_20060627_1701", "r");
    new Level2VolumeScan(raf, null);
  }
}