void setFormData(WObject.FormData formData) {
  if (formData.values.length != 0) {
    // The client reports its state as one semicolon-separated string:
    // volume;current;duration;paused;ended;readyState
    // (the widget is playing while the paused flag is "0")
    List<String> attributes =
        new ArrayList<String>(Arrays.asList(formData.values[0].split(";")));
    if (attributes.size() == 6) {
      try {
        this.volume_ = Double.parseDouble(attributes.get(0));
      } catch (RuntimeException e) {
        this.volume_ = -1;
      }
      try {
        this.current_ = Double.parseDouble(attributes.get(1));
      } catch (RuntimeException e) {
        this.current_ = -1;
      }
      try {
        this.duration_ = Double.parseDouble(attributes.get(2));
      } catch (RuntimeException e) {
        this.duration_ = -1;
      }
      this.playing_ = attributes.get(3).equals("0");
      this.ended_ = attributes.get(4).equals("1");
      try {
        this.readyState_ = intToReadyState(Integer.parseInt(attributes.get(5)));
      } catch (RuntimeException e) {
        throw new WException(
            "WAbstractMedia: error parsing: " + formData.values[0] + ": " + e.toString());
      }
    } else {
      throw new WException("WAbstractMedia: error parsing: " + formData.values[0]);
    }
  }
}
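// A minimal standalone sketch of the same split-and-parse-with-fallback pattern,
// useful for exercising the wire format in isolation. The class name and the
// sample payload are hypothetical, not part of WAbstractMedia.
class MediaStateParseDemo {
  public static void main(String[] args) {
    // hypothetical sample: volume;current;duration;paused;ended;readyState
    String payload = "0.8;12.5;300.0;0;0;4";
    String[] fields = payload.split(";");
    double volume;
    try {
      volume = Double.parseDouble(fields[0]);
    } catch (RuntimeException e) {
      volume = -1; // same fallback the setter above uses
    }
    boolean playing = fields[3].equals("0"); // playing while the paused flag is "0"
    System.out.println("volume=" + volume + " playing=" + playing);
  }
}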
private void doDisableMetadataIndexing() {
  ArchivalUnit au = getAu();
  if (au == null) return;
  try {
    disableMetadataIndexing(au, false);
  } catch (RuntimeException e) {
    log.error("Can't disable metadata indexing", e);
    errMsg = "Error: " + e.toString();
  }
}
private void forceReindexMetadata() {
  ArchivalUnit au = getAu();
  if (au == null) return;
  try {
    startReindexingMetadata(au, true);
  } catch (RuntimeException e) {
    log.error("Can't reindex metadata", e);
    errMsg = "Error: " + e.toString();
  }
}
public void handleElement(Element e) {
  // create morph item
  if (e.getName().equals("entry")) {
    try {
      morphItems.add(new MorphItem(e));
    } catch (RuntimeException exc) {
      System.err.println("Skipping morph item: " + e.getAttributeValue("word"));
      System.err.println(exc.toString());
    }
  }
  // create macro item
  else if (e.getName().equals("macro")) {
    try {
      macroItems.add(new MacroItem(e));
    } catch (RuntimeException exc) {
      System.err.println("Skipping macro item: " + e.getAttributeValue("name"));
      System.err.println(exc.toString());
    }
  }
}
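// A standalone sketch of the same "report and skip" loading policy, runnable
// without the grammar classes. It assumes the JDOM 1.x org.jdom.Element used
// above; the sample elements and the missing-attribute check are illustrative.
import java.util.ArrayList;
import java.util.List;
import org.jdom.Element;

class SkipBadEntriesDemo {
  public static void main(String[] args) {
    List<Element> entries = new ArrayList<Element>();
    Element good = new Element("entry");
    good.setAttribute("word", "run");
    entries.add(good);
    entries.add(new Element("entry")); // missing "word": will be skipped

    List<String> words = new ArrayList<String>();
    for (Element e : entries) {
      try {
        String word = e.getAttributeValue("word");
        if (word == null) throw new RuntimeException("entry has no word attribute");
        words.add(word);
      } catch (RuntimeException exc) {
        // same policy as handleElement: report the bad item, keep the rest
        System.err.println("Skipping morph item: " + e.getAttributeValue("word"));
        System.err.println(exc.toString());
      }
    }
    System.out.println("Loaded: " + words);
  }
}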
/**
 * Obtains the DOMImplementation interface, which provides a number of methods for
 * performing operations that are independent of any particular DOM instance.
 *
 * @throws DOMException <code>NOT_SUPPORTED_ERR</code> if the DOMImplementation cannot be obtained
 * @throws FactoryConfigurationError Application developers should never need to directly catch
 *     errors of this type.
 * @return DOMImplementation implementation
 */
private static DOMImplementation getDOMImplementation() throws DOMException { // can be made public
  DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
  try {
    return factory.newDocumentBuilder().getDOMImplementation();
  } catch (ParserConfigurationException ex) {
    throw new DOMException(
        DOMException.NOT_SUPPORTED_ERR,
        "Cannot create parser satisfying configuration parameters"); // NOI18N
  } catch (RuntimeException e) {
    // E.g. #36578, IllegalArgumentException. Try to recover gracefully.
    throw (DOMException)
        new DOMException(DOMException.NOT_SUPPORTED_ERR, e.toString()).initCause(e);
  }
}
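// A minimal usage sketch (an assumed caller, not part of the original class):
// the obtained DOMImplementation can create a fresh Document without any
// existing DOM instance, which is exactly the instance-independence the
// Javadoc above describes. Uses only standard W3C DOM / JAXP API.
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;

class DomImplementationDemo {
  public static void main(String[] args) throws Exception {
    DOMImplementation impl =
        DocumentBuilderFactory.newInstance().newDocumentBuilder().getDOMImplementation();
    Document doc = impl.createDocument(null, "root", null); // no namespace, no doctype
    System.out.println(doc.getDocumentElement().getTagName()); // prints "root"
  }
}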
public void handleElement(Element e) {
  // create family
  if (e.getName().equals("family")) {
    try {
      lexicon.add(new Family(e));
    } catch (RuntimeException exc) {
      System.err.println("Skipping family: " + e.getAttributeValue("name"));
      System.err.println(exc.toString());
    }
  }
  // save distributive attributes
  else if (e.getName().equals("distributive-features")) distrElt = e;
  // save licensing features
  else if (e.getName().equals("licensing-features")) licensingElt = e;
  // save relation sort order
  else if (e.getName().equals("relation-sorting")) relationSortingElt = e;
}
private boolean startCrawl(ArchivalUnit au, boolean force, boolean deep)
    throws CrawlManagerImpl.NotEligibleException {
  CrawlManagerImpl cmi = (CrawlManagerImpl) crawlMgr;
  if (force) {
    RateLimiter limit = cmi.getNewContentRateLimiter(au);
    if (!limit.isEventOk()) {
      limit.unevent();
    }
  }
  cmi.checkEligibleToQueueNewContentCrawl(au);
  String delayMsg = "";
  String deepMsg = "";
  try {
    cmi.checkEligibleForNewContentCrawl(au);
  } catch (CrawlManagerImpl.NotEligibleException e) {
    delayMsg = ", Start delayed due to: " + e.getMessage();
  }
  Configuration config = ConfigManager.getCurrentConfig();
  int pri = config.getInt(PARAM_CRAWL_PRIORITY, DEFAULT_CRAWL_PRIORITY);
  CrawlReq req;
  try {
    req = new CrawlReq(au);
    req.setPriority(pri);
    if (deep) {
      int d = Integer.parseInt(formDepth);
      if (d < 0) {
        errMsg = "Illegal refetch depth: " + d;
        return false;
      }
      req.setRefetchDepth(d);
      deepMsg = "Deep (" + req.getRefetchDepth() + ") ";
    }
  } catch (NumberFormatException e) {
    errMsg = "Illegal refetch depth: " + formDepth;
    return false;
  } catch (RuntimeException e) {
    log.error("Couldn't create CrawlReq: " + au, e);
    errMsg = "Couldn't create CrawlReq: " + e.toString();
    return false;
  }
  cmi.startNewContentCrawl(req, null);
  statusMsg = deepMsg + "Crawl requested for " + au.getName() + delayMsg;
  return true;
}
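// A standalone sketch of the depth-validation logic above, so the two failure
// modes (non-numeric input vs. negative depth) can be exercised without the
// crawl manager. The class and method names are illustrative; the messages
// mirror the ones startCrawl sets in errMsg.
class RefetchDepthDemo {
  static String validate(String formDepth) {
    try {
      int d = Integer.parseInt(formDepth);
      if (d < 0) return "Illegal refetch depth: " + d;
      return "ok, depth=" + d;
    } catch (NumberFormatException e) {
      return "Illegal refetch depth: " + formDepth;
    }
  }

  public static void main(String[] args) {
    System.out.println(validate("3"));   // ok, depth=3
    System.out.println(validate("-1"));  // Illegal refetch depth: -1
    System.out.println(validate("abc")); // Illegal refetch depth: abc
  }
}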
private OAuthAccessToken parseJsonToken(HttpMessage response) {
  // root stays null when parsing fails, so the "ok" check below actually
  // detects parse errors (starting from an empty JsonObject would make it
  // always true)
  com.google.gson.JsonObject root = null;
  com.google.gson.JsonParseException pe = null;
  try {
    root =
        (com.google.gson.JsonObject) new com.google.gson.JsonParser().parse(response.getBody());
  } catch (com.google.gson.JsonParseException error) {
    pe = error;
  }
  boolean ok = root != null;
  if (!ok) {
    logger.error(
        new StringWriter().append("parseJsonToken(): ").append(pe.toString()).toString());
    throw new OAuthProcess.TokenError(WString.tr("Wt.Auth.OAuthService.badjson"));
  } else {
    if (response.getStatus() == 200) {
      try {
        String accessToken = root.get("access_token").getAsString();
        int secs = JsonUtils.orIfNullInt(root.get("expires_in"), -1);
        WDate expires = null;
        if (secs > 0) {
          expires = new WDate(new Date()).addSeconds(secs);
        }
        // the standard OAuth 2.0 response field is "refresh_token"
        String refreshToken = JsonUtils.orIfNullString(root.get("refresh_token"), "");
        return new OAuthAccessToken(accessToken, expires, refreshToken);
      } catch (RuntimeException e) {
        logger.error(
            new StringWriter().append("token response error: ").append(e.toString()).toString());
        throw new OAuthProcess.TokenError(WString.tr("Wt.Auth.OAuthService.badresponse"));
      }
    } else {
      throw new OAuthProcess.TokenError(
          WString.tr(
              "Wt.Auth.OAuthService."
                  + JsonUtils.orIfNullString(root.get("error"), "missing error")));
    }
  }
}
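// A minimal standalone sketch of parsing an OAuth token response with Gson,
// assuming a hard-coded sample body instead of an HttpMessage. The field
// handling mirrors the method above; the sample JSON is illustrative only.
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

class TokenParseDemo {
  public static void main(String[] args) {
    String body = "{\"access_token\":\"abc123\",\"expires_in\":3600,\"refresh_token\":\"r1\"}";
    JsonObject root = (JsonObject) new JsonParser().parse(body);
    String accessToken = root.get("access_token").getAsString();
    int secs = root.has("expires_in") ? root.get("expires_in").getAsInt() : -1;
    String refreshToken =
        root.has("refresh_token") ? root.get("refresh_token").getAsString() : "";
    System.out.println(accessToken + " expires in " + secs + "s, refresh=" + refreshToken);
  }
}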
// given EntriesItem
private void getWithEntriesItem(
    Word w,
    MorphItem mi,
    String stem,
    String pred,
    String targetPred,
    String targetRel,
    EntriesItem item,
    MacroAdder macAdder,
    Map<String, Double> supertags,
    Set<String> supertagsFound,
    SignHash result) {
  // ensure apropos
  if (targetPred != null && !targetPred.equals(pred)) return;
  if (targetRel != null
      && !targetRel.equals(item.getIndexRel())
      && !targetRel.equals(item.getCoartRel())) return;
  if (!item.getActive().booleanValue()) return;
  if (mi.excluded(item)) return;
  try {
    // copy and add macros
    Category cat = item.getCat().copy();
    macAdder.addMacros(cat);
    // replace DEFAULT_VAL with pred, after first
    // unifying type of associated nom var(s) with sem class
    unifySemClass(cat, mi.getWord().getSemClass());
    REPLACEMENT = pred;
    cat.deepMap(defaultReplacer);
    // check supertag
    // TODO: think about earlier checks for efficiency, for grammars where macros and preds don't matter
    // Double lexprob = null; // nb: skipping lex log probs, don't seem to be helpful
    if (supertags != null) {
      // skip if not found
      String stag = cat.getSupertag();
      if (!supertags.containsKey(stag)) return;
      // otherwise update found supertags
      supertagsFound.add(stag);
      // get lex prob
      // lexprob = supertags.get(stag);
    }
    // propagate types of nom vars
    propagateTypes(cat);
    // handle distrib attrs and inherits-from
    propagateDistributiveAttrs(cat);
    expandInheritsFrom(cat);
    // merge stem, pos, sem class from morph item, plus supertag from cat
    Word word = Word.createFullWord(w, mi.getWord(), cat.getSupertag());
    // set origin and lexprob
    Sign sign = new Sign(word, cat);
    sign.setOrigin();
    // if (lexprob != null) {
    //   sign.addData(new SupertaggerAdapter.LexLogProb((float) Math.log10(lexprob)));
    // }
    // return sign
    result.insert(sign);
  } catch (RuntimeException exc) {
    System.err.println(
        "Warning: ignoring entry: "
            + item.getName()
            + " of family: "
            + item.getFamilyName()
            + " for stem: "
            + stem
            + " b/c: "
            + exc.toString());
  }
}
public void execute(FacesContext context) throws FacesException {
  boolean isFiner = log.isLoggable(Level.FINER);
  if (context.getResponseComplete() || context.getRenderResponse()) return;
  beforePhase(context, PhaseId.RESTORE_VIEW);
  try {
    if (isFiner) log.finer("JSF[] before restore view");
    restoreView(context);
  } finally {
    afterPhase(context, PhaseId.RESTORE_VIEW);
  }
  if (context.getResponseComplete() || context.getRenderResponse()) return;
  UIViewRoot viewRoot = context.getViewRoot();
  beforePhase(context, PhaseId.APPLY_REQUEST_VALUES);
  try {
    if (isFiner) log.finer(context.getViewRoot() + " before process decodes");
    viewRoot.processDecodes(context);
  } catch (RuntimeException e) {
    log.log(Level.WARNING, e.toString(), e);
  } finally {
    afterPhase(context, PhaseId.APPLY_REQUEST_VALUES);
  }
  //
  // Process Validations (processValidators)
  //
  if (context.getResponseComplete() || context.getRenderResponse()) return;
  beforePhase(context, PhaseId.PROCESS_VALIDATIONS);
  try {
    if (isFiner) log.finer(context.getViewRoot() + " before process validators");
    viewRoot.processValidators(context);
  } finally {
    afterPhase(context, PhaseId.PROCESS_VALIDATIONS);
  }
  //
  // Update Model Values (processUpdates)
  //
  if (context.getResponseComplete() || context.getRenderResponse()) return;
  beforePhase(context, PhaseId.UPDATE_MODEL_VALUES);
  try {
    if (isFiner) log.finer(context.getViewRoot() + " before process updates");
    viewRoot.processUpdates(context);
  } catch (RuntimeException e) {
    if (sendError(context, "processUpdates", e)) return;
  } finally {
    afterPhase(context, PhaseId.UPDATE_MODEL_VALUES);
  }
  //
  // Invoke Application (processApplication)
  //
  if (context.getResponseComplete() || context.getRenderResponse()) return;
  beforePhase(context, PhaseId.INVOKE_APPLICATION);
  try {
    if (isFiner) log.finer(context.getViewRoot() + " before process application");
    viewRoot.processApplication(context);
  } finally {
    afterPhase(context, PhaseId.INVOKE_APPLICATION);
  }
}
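// A minimal sketch of a standard javax.faces.event.PhaseListener that observes
// the same lifecycle phases the execute() method above drives via
// beforePhase()/afterPhase(). The class name and log text are illustrative;
// the interface and PhaseId.ANY_PHASE are standard JSF API.
import javax.faces.event.PhaseEvent;
import javax.faces.event.PhaseId;
import javax.faces.event.PhaseListener;

class LoggingPhaseListener implements PhaseListener {
  public PhaseId getPhaseId() {
    return PhaseId.ANY_PHASE; // observe every phase
  }

  public void beforePhase(PhaseEvent event) {
    System.out.println("before " + event.getPhaseId());
  }

  public void afterPhase(PhaseEvent event) {
    System.out.println("after " + event.getPhaseId());
  }
}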