/**
 * Query RadarServer controller for Spring Framework
 *
 * @param request HttpServletRequest
 * @param response HttpServletResponse
 * @return ModelAndView
 * @throws Exception
 */
protected ModelAndView handleRequestInternal(
    HttpServletRequest request, HttpServletResponse response) throws Exception {
  try {
    // Gather diagnostics for logging request.
    log.info("handleRequestInternal(): " + UsageLog.setupRequestContext(request));
    // catch rogue invalid request here
    if (request.getQueryString() == null) {
      log.info("Invalid dataset url reference " + request.getPathInfo());
      throw new RadarServerException("Invalid dataset url reference " + request.getPathInfo());
    }
    // Query results in model
    Map<String, Object> model = new HashMap<String, Object>();
    radarQuery(request, response, model);
    if (model == null || model.size() == 0) {
      ModelAndView mav = new ModelAndView(CREATE_VIEW);
      mav.addObject(MODEL_KEY, MSG_CODE);
      return mav;
    } else {
      return new ModelAndView("queryXml", model);
    }
  } catch (RadarServerException e) {
    throw e; // pass it onto Spring exceptionResolver
  } catch (Throwable e) {
    log.error("handleRequestInternal(): Problem handling request.", e);
    log.info(
        "handleRequestInternal(): "
            + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_BAD_REQUEST, -1));
    throw new RadarServerException("handleRequestInternal(): Problem handling request.");
  }
}
public void update() {
  synchronized (lock) {
    boolean forceProtoLocal = forceProto;
    if (fmrcDataset == null) {
      try {
        fmrcDataset = new FmrcDataset(config);
      } catch (Throwable t) {
        logger.error(config.spec + ": initial fmrcDataset creation failed", t);
        // throw new RuntimeException(t);
        return; // cannot continue without a dataset; avoid the NPE in setInventory() below
      }
    }
    try {
      FmrcInv fmrcInv = makeFmrcInv(null);
      fmrcDataset.setInventory(fmrcInv, forceProtoLocal);
      logger.debug(config.spec + ": make new Dataset, new proto = {}", forceProtoLocal);
      if (forceProtoLocal) forceProto = false;
      this.lastInvChanged = System.currentTimeMillis();
      if (forceProtoLocal) this.lastProtoChanged = this.lastInvChanged;
    } catch (Throwable t) {
      logger.error(config.spec + ": makeFmrcInv failed", t);
      // throw new RuntimeException(t);
    }
  }
}
/**
 * Looks for the parameter table which matches the center, subcenter and table version from the
 * tables array. If this is the first time asking for this table, then the parameters for this
 * table have not been read in yet, so this is done as well.
 *
 * @param center - integer from PDS octet 5, representing Center.
 * @param subcenter - integer from PDS octet 26, representing Subcenter
 * @param tableVersion - integer from PDS octet 4, representing Parameter Table Version
 * @return GribPDSParamTable matching center, subcenter, and number
 * @throws NotSupportedException no table found
 */
public static GribPDSParamTable getParameterTable(int center, int subcenter, int tableVersion)
    throws NotSupportedException {
  String key = center + "_" + subcenter + "_" + tableVersion;
  if (center == -1) { // non existent table
    logger.error("GribPDSParamTable: non existent table for center, subcenter, table = " + key);
    return null;
  }

  GribPDSParamTable table = tableMap.get(key);
  if (table != null) return table;

  table = readParameterTable(center, subcenter, tableVersion, true);
  if (table == null) {
    logger.error("GribPDSParamTable: cannot find table for center, subcenter, table " + key);
    throw new NotSupportedException(
        "Could not find a table entry for GRIB file with center: "
            + center
            + " subCenter: "
            + subcenter
            + " number: "
            + tableVersion);
  }
  tableMap.put(key, table);
  return table;
}
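// A minimal, hedged usage sketch for getParameterTable() above. The center/subcenter/version
// values (7/0/2) are illustrative only, and "GribTableLookupExample" is a hypothetical caller,
// not part of the original code.
public class GribTableLookupExample {
  public static void main(String[] args) {
    try {
      GribPDSParamTable table = GribPDSParamTable.getParameterTable(7, 0, 2);
      if (table == null) {
        // center == -1 is the only case that returns null instead of throwing
        System.err.println("No table: unknown center");
      }
    } catch (NotSupportedException e) {
      // thrown when no table entry matches the center/subcenter/version key
      System.err.println("Unsupported GRIB table: " + e.getMessage());
    }
  }
}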
@Override
public boolean exists(final String path) {
  LOGGER.debug("exists (path : {})", path);
  Boolean exists =
      executionHandler(
          new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
              int replyCode = ftpClient.stat(path);
              String replyText = ftpClient.getReplyString();
              if (!FTPReply.isPositiveCompletion(replyCode)) {
                // this reply code is set when the file doesn't exist on the server
                if (FTPReply.FILE_ACTION_NOT_TAKEN == replyCode) return false;
                else {
                  LOGGER.warn("Unexpected Reply (Code: {}, Text: '{}')", replyCode, replyText);
                }
              }
              String[] replyTextParts = replyText.split("\n");
              if (replyTextParts.length <= 2) {
                // a short STAT reply may mean the path is a directory; probe by changing into it
                if (ftpClient.changeWorkingDirectory(path)) ftpClient.changeToParentDirectory();
                else return false;
              }
              return true;
            }
          });
  LOGGER.debug("Returns: {}", exists);
  return exists;
}
@Transactional
public AccessKey create(@NotNull User user, @NotNull AccessKey accessKey) {
  if (accessKey.getLabel() == null) {
    throw new IllegalParametersException(Messages.LABEL_IS_REQUIRED);
  }
  Optional<AccessKey> akOpt =
      genericDAO
          .createNamedQuery(
              AccessKey.class, "AccessKey.getByUserAndLabel", Optional.<CacheConfig>empty())
          .setParameter("userId", user.getId())
          .setParameter("label", accessKey.getLabel())
          .getResultList()
          .stream()
          .findFirst();
  if (akOpt.isPresent()) {
    logger.error("Access key with label {} already exists", accessKey.getLabel());
    throw new ActionNotAllowedException(Messages.DUPLICATE_LABEL_FOUND);
  }
  if (accessKey.getId() != null) {
    logger.error("Access key id shouldn't be present in request parameters");
    throw new IllegalParametersException(Messages.INVALID_REQUEST_PARAMETERS);
  }
  authenticationUtils.validateActions(accessKey);
  AccessKeyProcessor keyProcessor = new AccessKeyProcessor();
  String key = keyProcessor.generateKey();
  accessKey.setKey(key);
  accessKey.setUser(user);
  genericDAO.persist(accessKey);
  for (AccessKeyPermission current : accessKey.getPermissions()) {
    AccessKeyPermission permission = preparePermission(current);
    permission.setAccessKey(accessKey);
    genericDAO.persist(permission);
  }
  return genericDAO.find(AccessKey.class, accessKey.getId());
}
private void sendMaybeEmail(String to) {
  String subject = "Processing is taking a long time";
  StringBuilder content = new StringBuilder();
  content.append("Your process is taking longer than expected.");
  content.append(" It might finish in a bit, but here is the status so far");
  content.append("\n\tUpload: ").append((uploadSuccessful) ? "success" : "waiting");
  content.append("\n\tParse: ").append((netcdfSuccessful) ? "success" : "waiting");
  content.append("\n\tStatistics: ").append((rStatsSuccessful) ? "success" : "waiting");
  content.append("\n\tMetadata: ").append((cswTransSuccessful) ? "success" : "waiting");
  content.append(
      "\n\nYou will receive another email if there is a success, but may not receive a failure notification.");
  List<String> bcc = new ArrayList<String>();
  String from = props.getProperty("watersmart.email.from");
  String bccAddr = props.getProperty("watersmart.email.tracker");
  if (!"".equals(bccAddr)) {
    bcc.add(bccAddr);
  }
  EmailMessage message = new EmailMessage(from, to, null, bcc, subject, content.toString());
  try {
    EmailHandler.sendMessage(message);
  } catch (AddressException ex) {
    log.error("Unable to send maybe e-mail:\n" + message, ex);
  } catch (MessagingException ex) {
    log.error("Unable to send maybe e-mail:\n" + message, ex);
  }
}
public void init() throws javax.servlet.ServletException {
  super.init();

  org.slf4j.Logger logServerStartup = org.slf4j.LoggerFactory.getLogger("serverStartup");
  logServerStartup.info(
      getClass().getName() + " initialization start - " + UsageLog.setupNonRequestContext());

  this.ascLimit = ThreddsConfig.getInt("Opendap.ascLimit", ascLimit);
  this.binLimit = ThreddsConfig.getInt("Opendap.binLimit", binLimit);

  this.odapVersionString = ThreddsConfig.get("Opendap.serverVersion", odapVersionString);
  logServerStartup.info(
      getClass().getName()
          + " version= "
          + odapVersionString
          + " ascLimit = "
          + ascLimit
          + " binLimit = "
          + binLimit);

  // debugging actions
  makeDebugActions();

  logServerStartup.info(
      getClass().getName()
          + " initialization done - "
          + UsageLog.closingMessageNonRequestContext());
}
public HTMLText load(final PanelInstance instance) {
  final List results = new ArrayList();
  try {
    new HibernateTxFragment() {
      protected void txFragment(Session session) throws Exception {
        FlushMode oldFlushMode = session.getFlushMode();
        session.setFlushMode(FlushMode.NEVER);
        Query query =
            session.createQuery(
                " from "
                    + HTMLText.class.getName()
                    + " as text where text.panelInstance = :instance");
        query.setParameter("instance", instance);
        query.setCacheable(true);
        results.addAll(query.list());
        session.setFlushMode(oldFlushMode);
      }
    }.execute();
    HTMLText text = null;
    if (results.size() > 0) text = (HTMLText) results.get(0);
    else log.debug("No html_text found for this HTML panel.");
    return text;
  } catch (Exception e) {
    log.error("Can't retrieve data for HTML panel ", e);
    return null;
  }
}
/**
 * Determine the text being shown for given panel.
 *
 * @param panel
 * @return The text shown, i18n.
 */
public Map getHtmlCode(Panel panel) {
  PanelSession pSession = SessionManager.getPanelSession(panel);
  Map m = (Map) pSession.getAttribute(ATTR_TEXT);
  if (m != null) return m;
  HTMLText text = load(panel.getInstance());
  if (text != null) return text.getText();
  try {
    HTMLText textToCreate = new HTMLText();
    textToCreate.setPanelInstance(panel.getInstance());
    Locale[] locales = LocaleManager.lookup().getPlatformAvailableLocales();
    for (int i = 0; i < locales.length; i++) {
      Locale locale = locales[i];
      ResourceBundle i18n =
          localeManager.getBundle("org.jboss.dashboard.ui.panel.advancedHTML.messages", locale);
      textToCreate.setText(locale.getLanguage(), i18n.getString("defaultContent"));
    }
    textToCreate.save();
  } catch (Exception e) {
    log.error("Error creating empty text for panel: ", e);
  }
  text = load(panel.getInstance());
  if (text != null) return text.getText();
  log.error("Current HTML code is null for panel " + panel);
  return null;
}
private boolean createPartitionedIndex(Formatter f) throws IOException {
  long start = System.currentTimeMillis();

  // create partitions based on TimePartitionCollections object
  for (CollectionManager dcm : tpc.makePartitions()) {
    tp.addPartition(dcm);
  }

  List<TimePartition.Partition> bad = new ArrayList<TimePartition.Partition>();
  for (TimePartition.Partition dc : tp.getPartitions()) {
    try {
      dc.makeGribCollection(f); // ensure collection has been read successfully
      if (trace) f.format(" Open partition %s%n", dc.getDcm().getCollectionName());
    } catch (Throwable t) {
      logger.error(" Failed to open partition " + dc.getName(), t);
      f.format(" FAIL on partition %s (remove) %n", dc.getDcm().getCollectionName());
      bad.add(dc); // LOOK may be a file leak ?
    }
  }

  // remove ones that failed
  for (TimePartition.Partition p : bad) tp.removePartition(p);

  // choose the "canonical" partition, aka prototype
  int n = tp.getPartitions().size();
  if (n == 0) {
    logger.error(" Nothing in this partition = " + tp.getName());
    f.format(" FAIL Partition empty collection = %s%n", tp.getName());
    return false;
  }
  int idx = tpc.getProtoIndex(n);
  TimePartition.Partition canon = tp.getPartitions().get(idx);
  f.format(" Using canonical partition %s%n", canon.getDcm().getCollectionName());

  // check consistency across vert and ens coords
  if (!checkPartitions(canon, f)) {
    logger.error(
        " Partition check failed, index not written on {} message = {}",
        tp.getName(),
        f.toString());
    f.format(" FAIL Partition check collection = %s%n", tp.getName());
    return false;
  }

  // make the time coordinates, place results into canon
  createPartitionedTimeCoordinates(canon, f);

  // ready to write the index file
  writeIndex(canon, f);

  // close open gc's
  tp.cleanup();

  long took = System.currentTimeMillis() - start;
  f.format(" CreatePartitionedIndex took %d msecs%n", took);
  return true;
}
private void deleteAccessKeyPermissions(AccessKey key) {
  logger.debug("Deleting all permissions of access key {}", key.getId());
  int deleted =
      genericDAO
          .createNamedQuery(
              "AccessKeyPermission.deleteByAccessKey", Optional.<CacheConfig>empty())
          .setParameter("accessKey", key)
          .executeUpdate();
  logger.info("Deleted {} permissions by access key {}", deleted, key.getId());
}
public boolean load(String abspath) {
  abspath = abspath.replace('\\', '/');
  File rcFile = new File(abspath);
  if (!rcFile.exists() || !rcFile.canRead()) {
    return false;
  }
  if (showlog) log.debug("Loading rc file: " + abspath);
  try (BufferedReader rdr =
      new BufferedReader(new InputStreamReader(new FileInputStream(rcFile), CDM.UTF8))) {
    for (int lineno = 1; ; lineno++) {
      URL url = null;
      String line = rdr.readLine();
      if (line == null) break;
      // trim leading blanks
      line = line.trim();
      if (line.length() == 0) continue; // empty line
      if (line.charAt(0) == '#') continue; // check for comment
      // parse the line
      if (line.charAt(0) == LTAG) {
        int rindex = line.indexOf(RTAG);
        if (rindex < 0) {
          if (showlog) log.error("Malformed [url] at " + abspath + "." + lineno);
          return false;
        }
        String surl = line.substring(1, rindex);
        try {
          url = new URL(surl);
        } catch (MalformedURLException mue) {
          if (showlog) log.error("Malformed [url] at " + abspath + "." + lineno);
        }
        line = line.substring(rindex + 1);
        // trim again
        line = line.trim();
      }
      // Get the key,value part
      String[] pieces = line.split("\\s*=\\s*");
      assert (pieces.length == 1 || pieces.length == 2);
      // Create the triple
      String value = "1";
      if (pieces.length == 2) value = pieces[1].trim();
      Triple triple = new Triple(pieces[0].trim(), value, url);
      List<Triple> list = triplestore.get(triple.key);
      if (list == null) list = new ArrayList<Triple>();
      Triple prev = addtriple(list, triple);
      triplestore.put(triple.key, list);
    }
  } catch (FileNotFoundException fe) {
    if (showlog) log.debug("rc file not found: " + abspath);
    return false;
  } catch (IOException ioe) {
    if (showlog) log.error("File " + abspath + ": IO exception: " + ioe.getMessage());
    return false;
  }
  return true;
}
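// A hedged illustration of the rc-file syntax the load() method above accepts, assuming LTAG is
// '[' and RTAG is ']'. "RcExample", "RcReader" and the keys written below are hypothetical; the
// class that actually declares load() is not shown in the snippet.
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

public class RcExample {
  public static void main(String[] args) throws IOException {
    File rc = File.createTempFile("example", ".rc");
    try (FileWriter w = new FileWriter(rc)) {
      w.write("# comment lines and blank lines are skipped\n");
      w.write("some.key = some value\n"); // plain key=value triple, url stays null
      w.write("[http://host:8080/path]scoped.key=true\n"); // url-scoped triple
      w.write("bare.flag\n"); // no '=' means the value defaults to "1"
    }
    // new RcReader().load(rc.getAbsolutePath()); // hypothetical call into load() above
  }
}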
@Scheduled(cron = "0 0 0 * * SUN")
@Transactional(propagation = Propagation.NOT_SUPPORTED)
public void removeExpiredKeys() {
  logger.debug("Removing expired access keys");
  int removed =
      genericDAO
          .createNamedQuery("AccessKey.deleteOlderThan", Optional.<CacheConfig>empty())
          .setParameter("expirationDate", timestampService.getTimestamp())
          .executeUpdate();
  logger.info("Removed {} expired access keys", removed);
}
private void checkSize(ServerDDS dds, boolean isAscii) throws Exception {
  long size = computeSize(dds, isAscii);
  // System.err.printf("total (constrained) size=%s\n", size);
  log.debug("total (constrained) size={}", size);
  // use floating-point division so fractions of a Mbyte are not truncated before the limit check
  double dsize = size / (1000.0 * 1000.0);
  double maxSize = isAscii ? ascLimit : binLimit; // Mbytes
  if (dsize > maxSize) {
    log.info("Reject request size = {} Mbytes", dsize);
    throw new UnsupportedOperationException(
        "Request too big=" + dsize + " Mbytes, max=" + maxSize);
  }
}
// any time the server needs access to the dataset, it gets a "GuardedDataset" which allows us to
// add caching
// optionally, a session may be established, which allows us to reserve the dataset for that
// session.
protected GuardedDataset getDataset(ReqState preq) throws Exception {
  HttpServletRequest req = preq.getRequest();
  String reqPath = preq.getDataSet();

  // see if the client wants sessions
  boolean acceptSession = false;
  String s = req.getHeader("X-Accept-Session");
  if (s != null && s.equalsIgnoreCase("true") && allowSessions) acceptSession = true;

  HttpSession session = null;
  if (acceptSession) {
    // see if theres already a session established, create one if not
    session = req.getSession();
    if (!session.isNew()) {
      GuardedDataset gdataset = (GuardedDataset) session.getAttribute(reqPath);
      if (null != gdataset) {
        if (debugSession)
          System.out.printf(" found gdataset %s in session %s %n", reqPath, session.getId());
        if (log.isDebugEnabled())
          log.debug(" found gdataset " + gdataset + " in session " + session.getId());
        return gdataset;
      }
    }
  }

  NetcdfFile ncd = DatasetHandler.getNetcdfFile(req, preq.getResponse(), reqPath);
  if (null == ncd) return null;

  GuardedDataset gdataset = new GuardedDatasetCacheAndClone(reqPath, ncd, acceptSession);
  // GuardedDataset gdataset = new GuardedDatasetImpl(reqPath, ncd, acceptSession);

  if (acceptSession) {
    String cookiePath = req.getRequestURI();
    String suffix = "." + preq.getRequestSuffix();
    if (cookiePath.endsWith(suffix)) // snip off the suffix
      cookiePath = cookiePath.substring(0, cookiePath.length() - suffix.length());
    session.setAttribute(reqPath, gdataset);
    session.setAttribute(CookieFilter.SESSION_PATH, cookiePath);
    // session.setAttribute("dataset", ncd.getLocation()); // for UsageValve
    // session.setMaxInactiveInterval(30); // 30 second timeout !!
    if (debugSession)
      System.out.printf(
          " added gdataset %s in session %s cookiePath %s %n",
          reqPath, session.getId(), cookiePath);
    if (log.isDebugEnabled())
      log.debug(" added gdataset " + gdataset + " in session " + session.getId());
  }
  /* else {
    session = req.getSession();
    session.setAttribute("dataset", ncd.getLocation()); // for UsageValve
  } */

  return gdataset;
}
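// Hedged client-side sketch showing how a caller could opt in to the session handling that
// getDataset() checks for via the "X-Accept-Session" request header. The URL is illustrative,
// and the server must also have allowSessions enabled for the opt-in to take effect.
import java.net.HttpURLConnection;
import java.net.URL;

public class SessionOptInExample {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:8080/thredds/dodsC/example.nc.dds"); // illustrative
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("X-Accept-Session", "true"); // ask the server to reserve the dataset
    System.out.println("HTTP " + conn.getResponseCode());
    conn.disconnect();
  }
}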
/** Read instance content from given InputStream, which must not be closed. */
public void importContent(PanelInstance instance, InputStream is) throws Exception {
  HTMLText currentText = new HTMLText();
  currentText.setPanelInstance(instance);
  ObjectInputStream ois = new ObjectInputStream(is);
  Map text = (Map) ois.readObject();
  if (log.isDebugEnabled()) log.debug("Importing content: " + text);
  for (Iterator it = text.keySet().iterator(); it.hasNext(); ) {
    String lang = (String) it.next();
    String value = (String) text.get(lang);
    currentText.setText(lang, value);
  }
  currentText.save();
}
/**
 * Send given content string as the HTTP response.
 *
 * @param contents the string to return as the HTTP response.
 * @param res the HttpServletResponse
 * @throws IOException if an I/O error occurs while writing the response.
 */
public static void returnString(String contents, HttpServletResponse res) throws IOException {
  try {
    ServletOutputStream out = res.getOutputStream();
    IO.copy(new ByteArrayInputStream(contents.getBytes()), out);
    log.info(
        UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_OK, contents.length()));
  } catch (IOException e) {
    log.error(" IOException sending string: ", e);
    log.info(UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, 0));
    res.sendError(HttpServletResponse.SC_NOT_FOUND, "Problem sending string: " + e.getMessage());
  }
}
/**
 * Write a file to the response stream. Handles Range requests.
 *
 * @param servlet called from here
 * @param req the request
 * @param res the response
 * @param file to serve
 * @param contentType content type, if null, will try to guess
 * @throws IOException on write error
 */
public static void returnFile(
    HttpServlet servlet,
    HttpServletRequest req,
    HttpServletResponse res,
    File file,
    String contentType)
    throws IOException {

  // No file, nothing to view
  if (file == null) {
    log.info(
        "returnFile(): "
            + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, 0));
    res.sendError(HttpServletResponse.SC_NOT_FOUND);
    return;
  }

  // check that it exists
  if (!file.exists()) {
    log.info(
        "returnFile(): "
            + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_NOT_FOUND, 0));
    res.sendError(HttpServletResponse.SC_NOT_FOUND);
    return;
  }

  // not a directory
  if (!file.isFile()) {
    log.info(
        "returnFile(): "
            + UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_BAD_REQUEST, 0));
    res.sendError(HttpServletResponse.SC_BAD_REQUEST);
    return;
  }

  // Set the type of the file
  String filename = file.getPath();
  if (null == contentType) {
    if (filename.endsWith(".html")) contentType = "text/html; charset=iso-8859-1";
    else if (filename.endsWith(".xml")) contentType = "text/xml; charset=iso-8859-1";
    else if (filename.endsWith(".txt") || (filename.endsWith(".log"))) contentType = CONTENT_TEXT;
    else if (filename.indexOf(".log.") > 0) contentType = CONTENT_TEXT;
    else if (filename.endsWith(".nc")) contentType = "application/x-netcdf";
    else contentType = servlet.getServletContext().getMimeType(filename);

    if (contentType == null) contentType = "application/octet-stream";
  }

  returnFile(req, res, file, contentType);
}
public void actionRefresh(CommandRequest request) {
  try {
    String timeOutValue = request.getRequestObject().getParameter("refreshTimeOut");
    if (!StringUtils.isBlank(timeOutValue)) {
      try {
        autoRefreshTimeout = Integer.decode(timeOutValue).intValue();
      } catch (NumberFormatException e) {
        log.warn("Cannot parse auto refresh value as a number.");
      }
    }
    getDashboard().refresh();
  } catch (Exception e) {
    log.error("Cannot refresh dashboard.", e);
  }
}
public static boolean saveFile(
    HttpServlet servlet,
    String contentPath,
    String path,
    HttpServletRequest req,
    HttpServletResponse res) {
  // @todo Need to use logServerAccess() below here.
  boolean debugRequest = Debug.isSet("SaveFile");
  if (debugRequest) log.debug(" saveFile(): path= " + path);

  String filename = contentPath + path; // absolute path
  File want = new File(filename);

  // backup current version if it exists
  int version = getBackupVersion(want.getParent(), want.getName());
  String fileSave = filename + "~" + version;
  File file = new File(filename);
  if (file.exists()) {
    try {
      IO.copyFile(filename, fileSave);
    } catch (IOException e) {
      log.error(
          "saveFile(): Unable to save copy of file "
              + filename
              + " to "
              + fileSave
              + "\n"
              + e.getMessage());
      return false;
    }
  }

  // save new file; try-with-resources ensures the stream is closed even when the copy fails
  try (OutputStream out = new BufferedOutputStream(new FileOutputStream(filename))) {
    IO.copy(req.getInputStream(), out);
  } catch (IOException e) {
    log.error("saveFile(): Unable to PUT file " + filename + "\n" + e.getMessage());
    return false;
  }
  if (debugRequest) log.debug("saveFile(): ok= " + filename);
  res.setStatus(HttpServletResponse.SC_CREATED);
  log.info(UsageLog.closingMessageForRequestContext(HttpServletResponse.SC_CREATED, -1));
  return true;
}
public DashboardFilterProperty[] getAllPropertiesForCurrentFilter() {
  List results = new ArrayList();
  try {
    // Static properties.
    DashboardFilterProperty[] staticProps = getStaticPropertiesForCurrentFilter();
    if (staticProps != null) results.addAll(Arrays.asList(staticProps));

    // Dynamic properties.
    Iterator it = getDashboard().getDataProviders().iterator();
    while (it.hasNext()) {
      DataProvider dataProvider = (DataProvider) it.next();
      DataProperty[] allProperties = dataProvider.getDataSet().getProperties();
      for (int i = 0; i < allProperties.length; i++) {
        DataProperty property = allProperties[i];
        DashboardFilterProperty prop =
            getDashboardFilterPropertyForCurrentFilter(
                dataProvider.getCode(), property.getPropertyId());
        if (prop == null)
          prop =
              new DashboardFilterProperty(
                  dataProvider.getCode(), property.getPropertyId(), getFilter(), null, false);
        results.add(prop);
      }
    }
  } catch (Exception e) {
    log.error("Cannot get data provider results.", e);
  }
  return (DashboardFilterProperty[]) results.toArray(new DashboardFilterProperty[results.size()]);
}
/**
 * Loops through an input stream and converts it into a string, then closes the input stream.
 *
 * @param is the input stream to read
 * @return the stream contents as a single string (line terminators are not preserved)
 */
public static String GetStringFromInputStream(InputStream is) {
  String line;
  StringBuilder total = new StringBuilder();

  // Wrap a BufferedReader around the InputStream
  BufferedReader rd = new BufferedReader(new InputStreamReader(is));

  // Read response until the end; readLine() strips line terminators, so lines are concatenated
  try {
    while ((line = rd.readLine()) != null) {
      total.append(line);
    }
  } catch (IOException e) {
    e.printStackTrace();
  } finally {
    try {
      is.close();
    } catch (Exception e) {
      tracer.warn(
          SessionLogcatAppender.MARKER_INTERNAL,
          "GetStringFromInputStream - could not close stream");
    }
  }

  // Return full string
  return total.toString();
}
public static byte[] GetByteArrayFromInputStream(InputStream is) {
  try {
    int length;
    int size = 1024;
    byte[] buffer;

    if (is instanceof ByteArrayInputStream) {
      size = is.available();
      buffer = new byte[size];
      is.read(buffer, 0, size);
    } else {
      ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
      buffer = new byte[size];
      while ((length = is.read(buffer, 0, size)) != -1) {
        outputStream.write(buffer, 0, length);
      }
      buffer = outputStream.toByteArray();
    }
    return buffer;
  } catch (IOException e) {
    e.printStackTrace();
  } finally {
    try {
      is.close();
    } catch (Exception e) {
      tracer.warn(
          SessionLogcatAppender.MARKER_INTERNAL,
          "GetByteArrayFromInputStream - could not close stream");
    }
  }
  return null;
}
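// Small, hedged usage sketch for the two stream helpers above; "StreamUtils" is a stand-in for
// the (unnamed) class that declares them, and the file names are illustrative.
import java.io.FileInputStream;
import java.io.IOException;

public class StreamUtilsExample {
  public static void main(String[] args) throws IOException {
    byte[] raw = StreamUtils.GetByteArrayFromInputStream(new FileInputStream("payload.bin"));
    String text = StreamUtils.GetStringFromInputStream(new FileInputStream("notes.txt"));
    System.out.println(raw.length + " bytes, " + text.length() + " chars");
    // Both helpers close the stream themselves; note that GetStringFromInputStream drops line
    // terminators because it concatenates readLine() results.
  }
}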
/**
 * Uses the OpenCSV library to parse a CSV file, skipping the header row.
 *
 * @param fileInputCsv the CSV file to parse.
 * @param separator the char separator.
 * @return the list of records (one String[] per row) parsed from the CSV file.
 */
public static List<String[]> parseCSVFileAsList(File fileInputCsv, char separator) {
  try {
    List<String[]> records;
    // create CSVReader object and read all lines at once
    try (CSVReader reader = new CSVReader(new FileReader(fileInputCsv), separator)) {
      records = reader.readAll();
      // skip header row (the original clear-then-iterate approach threw
      // ConcurrentModificationException and discarded the data)
      if (!records.isEmpty()) {
        records.remove(0);
      }
    }
    return records;
  } catch (IOException e) {
    logger.error(
        "Can't parse the CSV file:" + fileInputCsv.getAbsolutePath() + " -> " + e.getMessage(), e);
    return new ArrayList<>();
  }
}
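// Minimal, hedged usage sketch for parseCSVFileAsList(); "CsvUtils" is a stand-in for the class
// that declares it, and the file name and separator are illustrative.
import java.io.File;
import java.util.List;

public class CsvParseExample {
  public static void main(String[] args) {
    List<String[]> rows = CsvUtils.parseCSVFileAsList(new File("measurements.csv"), ';');
    for (String[] row : rows) {
      // each String[] is one data record; the header row has already been skipped
      System.out.println(String.join(" | ", row));
    }
  }
}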
@Override
public FTPFile getFileInfo(final String path) {
  LOGGER.debug("getFileInfo (path: {})", path);
  return executionHandler(
      new Callable<FTPFile>() {
        @Override
        public FTPFile call() throws Exception {
          if ("/".equals(path)) {
            FTPFile rootFile = new FTPFile();
            rootFile.setName("/");
            rootFile.setTimestamp(GregorianCalendar.getInstance());
            return rootFile;
          } else {
            String lastPathPart = UrlUtils.getLastPathPart(path);
            String parentPath = UrlUtils.getParentPath(path);
            ftpClient.changeWorkingDirectory(parentPath);
            FTPFile[] ftpFiles = ftpClient.listFiles();
            for (FTPFile ftpFile : ftpFiles) {
              if (ftpFile.getName().equals(lastPathPart)) {
                return ftpFile;
              }
            }
            return new FTPFile();
          }
        }
      });
}
private void sendFailedEmail(Exception ex, String to) {
  String subject = "WaterSMART processing failed";
  StringBuilder content = new StringBuilder();
  content.append("Your request unfortunately failed, we are looking into it.");
  content.append("\n\tUpload: ").append((uploadSuccessful) ? "success" : "failure");
  content.append("\n\tParse: ").append((netcdfSuccessful) ? "success" : "failure");
  content.append("\n\tStatistics: ").append((rStatsSuccessful) ? "success" : "failure");
  content.append("\n\tMetadata: ").append((cswTransSuccessful) ? "success" : "failure");
  RunMetadata metaObj = RunMetadata.getInstance(metadata);
  content.append("\n\n\tFile: ").append(metaObj.getFileName());
  content.append("\n\tModeler: ").append(metaObj.getName());
  content.append("\n\tComments: ").append(metaObj.getComments());
  content.append("\n\tDate: ").append(metaObj.getCreationDate());
  content.append("\n\nthe application failed with message: ").append(ex.getMessage());
  content.append("\n\nhere is the stack trace for troubleshooting:\n\n");
  for (StackTraceElement el : ex.getStackTrace()) {
    content.append(el.toString()).append("\n");
  }
  List<String> bcc = new ArrayList<String>();
  String from = props.getProperty("watersmart.email.from");
  String bccAddr = props.getProperty("watersmart.email.tracker");
  if (!"".equals(bccAddr)) {
    bcc.add(bccAddr);
  }
  EmailMessage message = new EmailMessage(from, to, null, bcc, subject, content.toString());
  try {
    EmailHandler.sendMessage(message);
  } catch (AddressException ex1) {
    log.error(
        "Unable to send failed e-mail:\n"
            + message
            + "\n\nOriginal Exception:\n"
            + ex.getMessage(),
        ex1);
  } catch (MessagingException ex1) {
    log.error(
        "Unable to send failed e-mail:\n"
            + message
            + "\n\nOriginal Exception:\n"
            + ex.getMessage(),
        ex1);
  }
}
public WcsCoverage(GridDataset.Gridset coverage, WcsDataset dataset) {
  this.dataset = dataset;
  if (this.dataset == null) {
    log.error("WcsCoverage(): non-null dataset required.");
    throw new IllegalArgumentException("Non-null dataset required.");
  }

  this.coverage = coverage;
  if (this.coverage == null) {
    log.error("WcsCoverage(): non-null coverage required.");
    throw new IllegalArgumentException("Non-null coverage required.");
  }

  this.coordSys = coverage.getGeoCoordSystem();
  if (this.coordSys == null) {
    log.error("WcsCoverage(): Coverage must have non-null coordinate system.");
    throw new IllegalArgumentException("Non-null coordinate system required.");
  }

  this.name = this.coordSys.getName();
  this.label = this.coordSys.getName();

  this.range = new HashMap<String, WcsRangeField>();
  StringBuilder descripSB =
      new StringBuilder("All parameters on the \"")
          .append(this.name)
          .append("\" coordinate system: ");
  for (GridDatatype curField : this.coverage.getGrids()) {
    String stdName = curField.findAttValueIgnoreCase("standard_name", "");
    descripSB.append(stdName.length() == 0 ? curField.getFullName() : stdName).append(",");

    WcsRangeField field = new WcsRangeField(curField);
    range.put(field.getName(), field);
  }
  descripSB.setCharAt(descripSB.length() - 1, '.');
  this.description = descripSB.toString();

  this.nativeCRS = EPSG_OGC_CF_Helper.getWcs1_0CrsId(this.coordSys.getProjection());

  this.defaultRequestCrs = "OGC:CRS84";

  this.supportedCoverageFormatList = new ArrayList<WcsRequest.Format>();
  // this.supportedCoverageFormatList = "application/x-netcdf";
  this.supportedCoverageFormatList.add(WcsRequest.Format.GeoTIFF);
  this.supportedCoverageFormatList.add(WcsRequest.Format.GeoTIFF_Float);
  this.supportedCoverageFormatList.add(WcsRequest.Format.NetCDF3);
}
private static DiskCache2 getDiskCache() {
  if (diskCache == null) {
    log.error("getDiskCache(): Disk cache has not been set.");
    throw new IllegalStateException(
        "Disk cache must be set before calling GetCoverage.getDiskCache().");
  }
  return diskCache;
}
/** Write instance content to given OutputStream, which must not be closed. */
public void exportContent(PanelInstance instance, OutputStream os) throws Exception {
  HTMLText text = load(instance);
  if (text == null) {
    try {
      text = new HTMLText();
      text.setPanelInstance(instance);
      text.save();
    } catch (Exception e) {
      log.error("Error creating empty HTMLText object", e);
    }
  }
  ObjectOutputStream oos = new ObjectOutputStream(os);
  if (log.isDebugEnabled()) log.debug("Exporting content: " + text.getText());
  HashMap h = new HashMap(); // Avoids serializing a hibernate map
  h.putAll(text.getText());
  oos.writeObject(h);
}
/**
 * Reads the table entries listed in the given table-list resource.
 *
 * @param aTableList path or resource name of the table list to read
 * @param aTables list that the parsed GribPDSParamTable entries are added to
 * @return Was read successful
 * @throws IOException On badness
 */
private static boolean readTableEntries(String aTableList, ArrayList<GribPDSParamTable> aTables)
    throws IOException {
  InputStream inputStream = GribResourceReader.getInputStream(aTableList);
  if (inputStream == null) {
    logger.debug("Could not open table file:" + aTableList);
    return false;
  }
  return readTableEntries(inputStream, aTableList, aTables);
}