protected void fired(double currentTime, Future<VocalizationCommand> commandFuture) {
  IPerceptualBuffer pBuffer = (IPerceptualBuffer) getBuffer();
  IModel model = pBuffer.getModel();
  try {
    VocalizationCommand command = commandFuture.get();
    ActualState state = command.getActualState();
    if (state != ActualState.COMPLETED)
      throw new IllegalStateException(
          "Execution of vocalization failed: " + command.getResult());

    String msg = "Execution of vocalization completed";
    if (LOGGER.isDebugEnabled()) LOGGER.debug(msg);
    if (Logger.hasLoggers(model)) Logger.log(model, Logger.Stream.VOCAL, msg);

    // Success: mark the buffer's execution and state slots as free.
    IChunk free = model.getDeclarativeModule().getFreeChunk();
    pBuffer.setExecutionChunk(free);
    pBuffer.setStateChunk(free);
  } catch (InterruptedException e) {
    // Restore the interrupt flag before bailing out so callers can see it.
    Thread.currentThread().interrupt();
    return;
  } catch (Exception e) {
    // Unwrap ExecutionException so the log shows the real cause.
    String msg = e.getMessage();
    if (e instanceof ExecutionException) msg = ((ExecutionException) e).getCause().getMessage();

    if (LOGGER.isDebugEnabled()) LOGGER.debug(msg);
    if (Logger.hasLoggers(model)) Logger.log(model, Logger.Stream.VOCAL, msg);

    // Failure: flag the buffer with the error chunk.
    IChunk error = model.getDeclarativeModule().getErrorChunk();
    pBuffer.setExecutionChunk(error);
    pBuffer.setStateChunk(error);
  }
}
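// fired() above follows a common Future-resolution pattern: get() the result,
// treat InterruptedException as cancellation (restoring the interrupt flag),
// and unwrap ExecutionException to reach the real cause. A minimal
// self-contained sketch of that pattern — the names here (FutureOutcomeDemo,
// TaskOutcome, resolve) are hypothetical and not taken from the code above.

import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class FutureOutcomeDemo {

  enum TaskOutcome { OK, ERROR }

  // Resolve the future and map its result onto a success/error outcome,
  // mirroring the free-chunk/error-chunk handling in fired().
  static TaskOutcome resolve(Future<String> future) {
    try {
      String result = future.get();
      System.out.println("completed: " + result);
      return TaskOutcome.OK;
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt(); // preserve the interrupt flag
      return TaskOutcome.ERROR;
    } catch (ExecutionException e) {
      // The interesting exception is the cause, not the wrapper.
      System.out.println("failed: " + e.getCause().getMessage());
      return TaskOutcome.ERROR;
    }
  }

  public static void main(String[] args) {
    ExecutorService pool = Executors.newSingleThreadExecutor();
    Future<String> future = pool.submit(() -> "vocalization done");
    System.out.println(resolve(future)); // OK
    pool.shutdown();
  }
}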
@Override
public void writeRevision(final Revision rev) throws IOException {
  final ParsedPage pp = parser.parse(rev.Text);
  if (pp == null) {
    LOGGER.warn("Could not parse page with title {}", pageTitle);
  } else if (pp.getSections() != null) {
    // Collect declined forms first so they can be registered alongside the lemma.
    final Set<String> declinations = getDeclinations(pp.getTemplates());
    if (!declinations.isEmpty()) {
      nounTitles.addAll(declinations);
    }
    for (final Section section : pp.getSections()) {
      final List<Template> partOfSpeechTemplates = getPartOfSpeechTemplates(section);
      if (!partOfSpeechTemplates.isEmpty()) {
        for (final Template template : partOfSpeechTemplates) {
          if (isNoun.f(getFirstParameter.f(template))) {
            nounTitles.add(pageTitle);
            if (declinations.isEmpty() && LOGGER.isDebugEnabled()) {
              LOGGER.debug("Found no declinations for page {}", pageTitle);
            }
          }
        }
        // Only the first section carrying part-of-speech templates is evaluated.
        return;
      }
    }
    if (LOGGER.isDebugEnabled() && rev.Text.contains("Substantiv")) {
      LOGGER.debug(
          "No part-of-speech found for {} (which indeed contains 'Substantiv')", pageTitle);
    }
  }
}
/**
 * Constructs a request logging filter wrapper.
 *
 * @param request the request to wrap.
 * @throws IOException if any problems were encountered while reading from the stream.
 */
public RequestLoggingFilterWrapper(HttpServletRequest request) throws IOException {
  // Perform super class processing.
  super(request);

  // Only grab the payload if debugging is enabled. Otherwise, we'd always pre-read the
  // entire payload, causing a slight performance degradation for no reason.
  if (LOGGER.isDebugEnabled()) {
    // Read the original payload into the payload variable.
    InputStream inputStream = null;
    try {
      // Get the input stream.
      inputStream = request.getInputStream();
      if (inputStream != null) {
        // Read the payload from the stream we already hold, rather than calling
        // request.getInputStream() again and leaving a second stream unclosed.
        payload = IOUtils.toByteArray(inputStream);
      }
    } finally {
      if (inputStream != null) {
        try {
          inputStream.close();
        } catch (IOException iox) {
          LOGGER.warn("Unable to close request input stream.", iox);
        }
      }
    }
  }
}
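// A sketch of how such a wrapper is typically used from the enclosing filter.
// The sketch class name, the getPayload() accessor, and the UTF-8 decoding are
// assumptions for illustration, not taken from the class above; the point is
// that the wrapper is only constructed when debug logging is actually on.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class RequestLoggingFilterSketch {

  private static final Logger LOGGER = LoggerFactory.getLogger(RequestLoggingFilterSketch.class);

  // Wrap the request only when debug logging is enabled, mirroring the
  // constructor's guard, then log the captured payload after the chain ran.
  protected void doFilterInternal(
      HttpServletRequest request, HttpServletResponse response, FilterChain chain)
      throws ServletException, IOException {
    if (LOGGER.isDebugEnabled()) {
      RequestLoggingFilterWrapper wrapped = new RequestLoggingFilterWrapper(request);
      chain.doFilter(wrapped, response);
      // getPayload() is a hypothetical accessor for the captured bytes.
      LOGGER.debug("Request payload: " + new String(wrapped.getPayload(), StandardCharsets.UTF_8));
    } else {
      chain.doFilter(request, response);
    }
  }
}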
/** If the file is defined, delete it. */
protected void deleteFile(File file) {
  if (file != null) {
    if (LOGGER.isDebugEnabled()) {
      LOGGER.debug("Deleting PDF file: " + file.getName());
    }
    if (!file.delete()) {
      LOGGER.warn("Cannot delete file: " + file.getAbsolutePath());
    }
  }
}
@Override
public void run() {
  try {
    long totalCount = configInfoLoader.getConfigInfoCount();
    if (totalCount <= 0) {
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(String.format("%s loaded no config info", ApiConfiguratorProvider.class));
      }
      return;
    }
    // Page through the config entries rather than loading them all at once.
    for (long startIndex = 0; startIndex < totalCount; startIndex += PAGE_SIZE) {
      List<ConfigInfo> configInfos = configInfoLoader.getConfigInfo(startIndex, PAGE_SIZE);
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(
            String.format(
                "%s loading config info, totalCount:%s, startIndex:%s, pageSize:%s",
                ApiConfiguratorProvider.class, totalCount, startIndex, PAGE_SIZE));
      }
      for (ConfigInfo configInfo : configInfos) {
        if (configInfo.isValid()) {
          propertiesMap.put(wrapKey(configInfo.getKey()), configInfo.getValue());
          if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(
                String.format(
                    "%s overriding config, Key:%s, Value:%s",
                    ApiConfiguratorProvider.class, configInfo.getKey(), configInfo.getValue()));
          }
        } else {
          // Evict invalid entries so stale values do not linger.
          propertiesMap.remove(wrapKey(configInfo.getKey()));
          if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(
                String.format(
                    "%s removing config, Key:%s, Value:%s",
                    ApiConfiguratorProvider.class, configInfo.getKey(), configInfo.getValue()));
          }
        }
      }
    }
  } catch (Exception e) {
    LOGGER.error(
        String.format("%s failed to update config info", ApiConfiguratorProvider.class), e);
  }
}
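// The isDebugEnabled() checks above exist because String.format runs eagerly:
// without the guard, every iteration would pay the formatting cost even when
// debug output is discarded. With SLF4J's parameterized messages the guard
// becomes unnecessary for cheap arguments, since the message is only assembled
// once the level is known to be enabled. A minimal illustration — DemoConfigLoad
// and its key/value are hypothetical.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DemoConfigLoad {

  private static final Logger LOGGER = LoggerFactory.getLogger(DemoConfigLoad.class);

  public static void main(String[] args) {
    String key = "timeout";
    String value = "30s";

    // Eager formatting: String.format runs even if debug is off,
    // so it needs the isDebugEnabled() guard.
    if (LOGGER.isDebugEnabled()) {
      LOGGER.debug(String.format("overriding config, Key:%s, Value:%s", key, value));
    }

    // Parameterized logging: the message is only assembled when debug is
    // enabled, so no guard is needed for cheap arguments like these.
    LOGGER.debug("overriding config, Key:{}, Value:{}", key, value);
  }
}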
/**
 * Called when the wrapped {@link ActivityBehavior} calls the {@link
 * AbstractBpmnActivityBehavior#leave(ActivityExecution)} method. Handles the completion of one of
 * the parallel instances.
 */
public void leave(ActivityExecution execution) {
  callActivityEndListeners(execution);

  int loopCounter = getLoopVariable(execution, LOOP_COUNTER);
  int nrOfInstances = getLoopVariable(execution, NUMBER_OF_INSTANCES);
  int nrOfCompletedInstances = getLoopVariable(execution, NUMBER_OF_COMPLETED_INSTANCES) + 1;
  int nrOfActiveInstances = getLoopVariable(execution, NUMBER_OF_ACTIVE_INSTANCES) - 1;

  if (isExtraScopeNeeded()) {
    // In case an extra scope was created, it must be destroyed first before going further
    ExecutionEntity extraScope = (ExecutionEntity) execution;
    execution = execution.getParent();
    extraScope.remove();
  }

  setLoopVariable(execution.getParent(), NUMBER_OF_COMPLETED_INSTANCES, nrOfCompletedInstances);
  setLoopVariable(execution.getParent(), NUMBER_OF_ACTIVE_INSTANCES, nrOfActiveInstances);
  logLoopDetails(
      execution,
      "instance completed",
      loopCounter,
      nrOfCompletedInstances,
      nrOfActiveInstances,
      nrOfInstances);

  ExecutionEntity executionEntity = (ExecutionEntity) execution;
  executionEntity.inactivate();
  executionEntity.getParent().forceUpdate();

  List<ActivityExecution> joinedExecutions =
      executionEntity.findInactiveConcurrentExecutions(execution.getActivity());
  if (joinedExecutions.size() == nrOfInstances || completionConditionSatisfied(execution)) {

    // Removing all active child executions (i.e. because the completion condition is true)
    List<ExecutionEntity> executionsToRemove = new ArrayList<ExecutionEntity>();
    for (ActivityExecution childExecution : executionEntity.getParent().getExecutions()) {
      if (childExecution.isActive()) {
        executionsToRemove.add((ExecutionEntity) childExecution);
      }
    }
    for (ExecutionEntity executionToRemove : executionsToRemove) {
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(
            "Execution {} still active, but multi-instance is completed. Removing this execution.",
            executionToRemove);
      }
      executionToRemove.inactivate();
      executionToRemove.deleteCascade("multi-instance completed");
    }

    executionEntity.takeAll(
        executionEntity.getActivity().getOutgoingTransitions(), joinedExecutions);
  }
}
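// leave() relies on getLoopVariable/setLoopVariable helpers to maintain the
// multi-instance counters. A plausible sketch of the lookup helper, assuming
// the counter is stored as a local variable on the execution or one of its
// parents — this is an assumption about the surrounding class, not code taken
// from it.

// Sketch only: walk up the execution tree until a local value for the
// counter variable is found; default to 0 when nothing is set.
protected int getLoopVariable(ActivityExecution execution, String variableName) {
  Object value = execution.getVariableLocal(variableName);
  ActivityExecution parent = execution.getParent();
  while (value == null && parent != null) {
    value = parent.getVariableLocal(variableName);
    parent = parent.getParent();
  }
  return value != null ? (Integer) value : 0;
}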
private Set<String> getDeclinations(final List<Template> allTemplates) {
  final List<Template> declinationTemplates =
      getTemplate(allTemplates, "Deutsch_Substantiv_Übersicht");
  if (!declinationTemplates.isEmpty()) {
    if (declinationTemplates.size() > 1 && LOGGER.isDebugEnabled()) {
      LOGGER.debug(
          "Found more than 1 'Deutsch_Substantiv_Übersicht' template for {}", pageTitle);
    }
    final Set<String> declinations = new HashSet<String>();
    final Template template = declinationTemplates.get(0);
    for (final String param : template.getParameters()) {
      final String[] parts = SINGULAR_PLURAL_PARAM_PATTERN.split(param, 0);
      if (parts.length != 2) {
        // Skip parameters that don't split into exactly a label and a word
        // form; indexing parts[1] would otherwise throw.
        if (LOGGER.isDebugEnabled()) {
          LOGGER.debug(
              "Have unexpected declination template/pattern for page '{}': {}\nExpected pattern: {}",
              new Object[] {pageTitle, param, SINGULAR_PLURAL_PARAM_PATTERN.pattern()});
        }
      } else {
        declinations.add(parts[1].trim());
      }
    }
    return declinations;
  }
  return Collections.emptySet();
}
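// Each template parameter is expected to split into exactly two parts, with
// the declined word form in the second slot. A toy illustration of that
// contract — the actual SINGULAR_PLURAL_PARAM_PATTERN is defined elsewhere,
// so the '=' separator below is purely a stand-in assumption.

import java.util.regex.Pattern;

public class DeclinationSplitDemo {

  // Stand-in for SINGULAR_PLURAL_PARAM_PATTERN; the real pattern is not shown here.
  private static final Pattern PARAM_PATTERN = Pattern.compile("=");

  public static void main(String[] args) {
    // Well-formed parameter: label on the left, declined form on the right.
    String[] ok = PARAM_PATTERN.split("Nominativ Plural=Häuser", 0);
    System.out.println(ok.length);    // 2
    System.out.println(ok[1].trim()); // "Häuser"

    // Malformed parameter: no separator, so only one part comes back and
    // indexing parts[1] would throw. This is the case the length check skips.
    String[] bad = PARAM_PATTERN.split("Häuser", 0);
    System.out.println(bad.length);   // 1
  }
}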
@Override
protected void callCallback(
    final ResponseBuffers responseBuffers, final Throwable throwableFromCallback) {
  try {
    if (throwableFromCallback != null) {
      throw throwableFromCallback;
    }

    if (LOGGER.isDebugEnabled()) {
      LOGGER.debug("Command execution completed");
    }

    if (!ProtocolHelper.isCommandOk(
        new BsonBinaryReader(new ByteBufferBsonInput(responseBuffers.getBodyByteBuffer())))) {
      throw getCommandFailureException(
          getResponseDocument(responseBuffers, message, new BsonDocumentCodec()),
          connectionDescription.getServerAddress());
    }

    if (commandListener != null) {
      sendSucceededEvent(
          connectionDescription,
          startTimeNanos,
          message,
          getResponseDocument(responseBuffers, message, new RawBsonDocumentCodec()));
    }

    callback.onResult(
        getResponseDocument(responseBuffers, message, commandResultDecoder), null);
  } catch (Throwable t) {
    sendFailedEvent(connectionDescription, startTimeNanos, message, t);
    callback.onResult(null, t);
  } finally {
    if (responseBuffers != null) {
      responseBuffers.close();
    }
  }
}
/**
 * Do the actual work of creating the PDF temporary file.
 *
 * @param spec the print specification (JSON).
 * @param httpServletRequest the originating request, used to forward configured headers.
 * @return the temporary file holding the generated output.
 * @throws InterruptedException if the print job is interrupted.
 */
protected TempFile doCreatePDFFile(String spec, HttpServletRequest httpServletRequest)
    throws IOException, DocumentException, ServletException, InterruptedException {
  if (LOGGER.isDebugEnabled()) {
    LOGGER.debug("Generating PDF for spec=" + spec);
  }
  if (SPEC_LOGGER.isInfoEnabled()) {
    SPEC_LOGGER.info(spec);
  }

  PJsonObject specJson = MapPrinter.parseSpec(spec);
  if (specJson.has("app")) {
    app = specJson.getString("app");
  } else {
    app = null;
  }

  MapPrinter mapPrinter = getMapPrinter(app);

  // Forward the configured request headers (defaulting to Referer and Cookie).
  Map<String, String> headers = new HashMap<String, String>();
  TreeSet<String> configHeaders = mapPrinter.getConfig().getHeaders();
  if (configHeaders == null) {
    configHeaders = new TreeSet<String>();
    configHeaders.add("Referer");
    configHeaders.add("Cookie");
  }
  for (String header : configHeaders) {
    if (httpServletRequest.getHeader(header) != null) {
      headers.put(header, httpServletRequest.getHeader(header));
    }
  }

  final OutputFormat outputFormat = mapPrinter.getOutputFormat(specJson);

  // Create a temporary file that will contain the PDF.
  final File tempJavaFile =
      File.createTempFile(
          TEMP_FILE_PREFIX, "." + outputFormat.getFileSuffix() + TEMP_FILE_SUFFIX, getTempDir());
  TempFile tempFile = new TempFile(tempJavaFile, specJson, outputFormat);

  FileOutputStream out = null;
  try {
    out = new FileOutputStream(tempFile);
    mapPrinter.print(specJson, out, headers);
    return tempFile;
  } catch (IOException e) {
    deleteFile(tempFile);
    throw e;
  } catch (DocumentException e) {
    deleteFile(tempFile);
    throw e;
  } catch (InterruptedException e) {
    deleteFile(tempFile);
    throw e;
  } finally {
    if (out != null) {
      out.close();
    }
  }
}
private Map<String, List<DbAttribute>> loadDbAttributes() throws SQLException {
  Map<String, List<DbAttribute>> attributes = new HashMap<>();
  try (ResultSet rs = getMetaData().getColumns(getCatalog(), getSchema(), "%", "%")) {
    Set<String> columns = new HashSet<String>();

    while (rs.next()) {
      // Record the column labels of the metadata result set once, so that
      // optional, driver-specific columns can be checked for later.
      if (columns.isEmpty()) {
        ResultSetMetaData rsMetaData = rs.getMetaData();
        for (int i = 1; i <= rsMetaData.getColumnCount(); i++) {
          columns.add(rsMetaData.getColumnLabel(i));
        }
      }

      // For a reason not quite apparent to me, Oracle sometimes returns
      // duplicate record sets for the same table, messing up table names.
      // E.g. for the system table "WK$_ATTR_MAPPING" columns are returned
      // twice - as "WK$_ATTR_MAPPING" and "WK$$_ATTR_MAPPING"... Go figure.
      String tableName = rs.getString("TABLE_NAME");
      String columnName = rs.getString("COLUMN_NAME");

      PatternFilter columnFilter = filter.isIncludeTable(tableName);

      /*
       * A possible optimization: if the filter kept a map<tableName,
       * columnFilter>, it could be filled during table loading, since the
       * pattern matching is already done once there and all tables to
       * process are known at that point.
       */
      if (columnFilter == null || !columnFilter.isInclude(columnName)) {
        if (LOGGER.isDebugEnabled()) {
          LOGGER.debug(
              "Skip column '"
                  + tableName
                  + "."
                  + columnName
                  + "' (Path: "
                  + getCatalog()
                  + "/"
                  + getSchema()
                  + "; Filter: "
                  + columnFilter
                  + ")");
        }
        continue;
      }

      List<DbAttribute> attrs = attributes.get(tableName);
      if (attrs == null) {
        attrs = new LinkedList<DbAttribute>();
        attributes.put(tableName, attrs);
      }
      attrs.add(loadDbAttribute(columns, rs));
    }
  }
  return attributes;
}
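// The columns set exists because JDBC drivers differ in which metadata columns
// they expose; the per-attribute loader can consult it before reading an
// optional one. A sketch of that idea — the use of "IS_AUTOINCREMENT" and the
// DbAttribute setters are assumptions for illustration, not the actual
// loadDbAttribute implementation.

// Sketch only: read mandatory metadata columns directly, but check the
// recorded label set before touching optional, driver-specific ones.
private DbAttribute loadDbAttribute(Set<String> columns, ResultSet rs) throws SQLException {
  DbAttribute attribute = new DbAttribute(rs.getString("COLUMN_NAME"));
  attribute.setType(rs.getInt("DATA_TYPE"));
  attribute.setMandatory(rs.getInt("NULLABLE") == DatabaseMetaData.columnNoNulls);

  // "IS_AUTOINCREMENT" appeared in JDBC 4; older drivers omit it entirely,
  // which is why the label set recorded in loadDbAttributes() is consulted.
  if (columns.contains("IS_AUTOINCREMENT")) {
    attribute.setGenerated("YES".equals(rs.getString("IS_AUTOINCREMENT")));
  }
  return attribute;
}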
protected void renderTiles(
    TileRenderer formater,
    Transformer transformer,
    URI commonUri,
    ParallelMapTileLoader parallelMapTileLoader)
    throws IOException, URISyntaxException {
  final List<URI> urls = new ArrayList<URI>(1);
  final double offsetX;
  final double offsetY;
  final long bitmapTileW;
  final long bitmapTileH;
  int nbTilesW = 0;

  double minGeoX = transformer.getRotatedMinGeoX();
  double minGeoY = transformer.getRotatedMinGeoY();
  double maxGeoX = transformer.getRotatedMaxGeoX();
  double maxGeoY = transformer.getRotatedMaxGeoY();

  if (tileCacheLayerInfo != null) {
    // tiled
    transformer = fixTiledTransformer(transformer);
    if (transformer == null) {
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Resolution out of bounds.");
      }
      // Without a valid transformer no tile geometry can be computed.
      return;
    }

    bitmapTileW = tileCacheLayerInfo.getWidth();
    bitmapTileH = tileCacheLayerInfo.getHeight();
    final double tileGeoWidth = transformer.getResolution() * bitmapTileW;
    final double tileGeoHeight = transformer.getResolution() * bitmapTileH;

    // TODO I would like to do this sort of thing by extension points for plugins
    //
    // The tileMinGeoSize is not calculated the same way in TileCache
    // and KaMap, so they are treated differently here.
    final double tileMinGeoX;
    final double tileMinGeoY;
    if (this instanceof TileCacheMapReader) {
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("TileCacheMapReader min geo x and y calculation used");
      }
      tileMinGeoX =
          (float)
                  (Math.floor((minGeoX - tileCacheLayerInfo.getMinX()) / tileGeoWidth)
                      * tileGeoWidth)
              + tileCacheLayerInfo.getMinX();
      tileMinGeoY =
          (float)
                  (Math.floor((minGeoY - tileCacheLayerInfo.getMinY()) / tileGeoHeight)
                      * tileGeoHeight)
              + tileCacheLayerInfo.getMinY();
    } else if (this instanceof KaMapCacheMapReader || this instanceof KaMapMapReader) {
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Kamap min geo x and y calculation used");
      }
      tileMinGeoX = (float) (Math.floor(minGeoX / tileGeoWidth) * tileGeoWidth);
      tileMinGeoY = (float) (Math.floor(minGeoY / tileGeoHeight) * tileGeoHeight);
    } else if (this instanceof WMTSMapReader) {
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("WMTS min geo x and y calculation used");
      }
      tileMinGeoX =
          (Math.floor((minGeoX - tileCacheLayerInfo.getMinX()) / tileGeoWidth) * tileGeoWidth)
              + tileCacheLayerInfo.getMinX();
      tileMinGeoY =
          tileCacheLayerInfo.getMaxY()
              - (Math.ceil((tileCacheLayerInfo.getMaxY() - minGeoY) / tileGeoHeight)
                  * tileGeoHeight);
    } else {
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Default min geo x and y calculation used");
      }
      tileMinGeoX =
          (float)
                  (Math.floor((minGeoX - tileCacheLayerInfo.getMinX()) / tileGeoWidth)
                      * tileGeoWidth)
              + tileCacheLayerInfo.getMinX();
      tileMinGeoY =
          (float)
                  (Math.floor((minGeoY - tileCacheLayerInfo.getMinY()) / tileGeoHeight)
                      * tileGeoHeight)
              + tileCacheLayerInfo.getMinY();
    }

    // Leftward shift of the tile start point, in pixels.
    offsetX = (minGeoX - tileMinGeoX) / transformer.getResolution();
    // Downward shift of the tile start point, in pixels.
    offsetY = (minGeoY - tileMinGeoY) / transformer.getResolution();

    for (double geoY = tileMinGeoY; geoY < maxGeoY; geoY += tileGeoHeight) {
      nbTilesW = 0;
      for (double geoX = tileMinGeoX; geoX < maxGeoX; geoX += tileGeoWidth) {
        nbTilesW++;
        if (tileCacheLayerInfo.isVisible(geoX, geoY, geoX + tileGeoWidth, geoY + tileGeoHeight)) {
          if (LOGGER.isDebugEnabled()) {
            // Debugging aid: compare the tile origin against a fixed
            // reference coordinate, within a quarter-unit tolerance.
            double exactLeftX = 958760.7160000019;
            double exactBottomY = 6389320.947999999;
            boolean positionCorrectX =
                geoX <= (exactLeftX + 0.25) && geoX >= (exactLeftX - 0.25);
            boolean positionCorrectY =
                geoY <= (exactBottomY + 0.25) && geoY >= (exactBottomY - 0.25);
            LOGGER.debug(
                positionCorrectX && positionCorrectY ? "Position Correct" : "Position Incorrect");
          }
          urls.add(
              getTileUri(
                  commonUri,
                  transformer,
                  geoX,
                  geoY,
                  geoX + tileGeoWidth,
                  geoY + tileGeoHeight,
                  bitmapTileW,
                  bitmapTileH));
        } else {
          if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(
                "Tile out of bounds: "
                    + getTileUri(
                        commonUri,
                        transformer,
                        geoX,
                        geoY,
                        geoX + tileGeoWidth,
                        geoY + tileGeoHeight,
                        bitmapTileW,
                        bitmapTileH));
          }
          urls.add(null);
        }
      }
    }
  } else {
    // single tile
    nbTilesW = 1;
    offsetX = 0;
    offsetY = 0;
    bitmapTileW = transformer.getRotatedBitmapW();
    bitmapTileH = transformer.getRotatedBitmapH();
    urls.add(
        getTileUri(
            commonUri, transformer, minGeoX, minGeoY, maxGeoX, maxGeoY, bitmapTileW, bitmapTileH));
  }

  // TODO The scaling applied here is wrong.
  formater.render(
      transformer,
      urls,
      parallelMapTileLoader,
      context,
      opacity,
      nbTilesW,
      offsetX,
      offsetY,
      bitmapTileW,
      bitmapTileH);
}
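// The tile-origin and offset arithmetic in renderTiles() snaps the requested
// extent down to the tile grid and records how far the map origin sits inside
// the first tile. A standalone worked example of that arithmetic — the grid
// origin, resolution, and extent below are made-up numbers, not values from
// the code above.

public class TileGridMathDemo {

  public static void main(String[] args) {
    // Hypothetical inputs.
    double gridMinX = 0.0;    // tile grid origin (tileCacheLayerInfo.getMinX())
    double resolution = 10.0; // map units per pixel
    long tileWidthPx = 256;   // tile width in pixels
    double minGeoX = 3100.0;  // left edge of the requested map extent

    // Geographic width of one tile.
    double tileGeoWidth = resolution * tileWidthPx; // 2560.0

    // Snap the requested left edge down to the nearest tile boundary.
    double tileMinGeoX =
        Math.floor((minGeoX - gridMinX) / tileGeoWidth) * tileGeoWidth + gridMinX; // 2560.0

    // Pixel offset of the map origin inside that first tile.
    double offsetX = (minGeoX - tileMinGeoX) / resolution; // 54.0

    System.out.println("tileMinGeoX = " + tileMinGeoX + ", offsetX = " + offsetX);
  }
}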