/**
 * Builds a Kafka high-level consumer configuration from the given connection details.
 *
 * @param zookeeper ZooKeeper connect string for the Kafka cluster.
 * @param groupId consumer group id for this subscriber.
 * @param optionalConfigs extra properties in the form
 *     "name1:value1,name2:value2"; may be null.
 * @return the assembled {@link ConsumerConfig}.
 * @throws InputEventAdapterRuntimeException if the Kafka classes are missing from the classpath.
 */
private static ConsumerConfig createConsumerConfig(
    String zookeeper, String groupId, String optionalConfigs) {
  try {
    Properties props = new Properties();
    props.put(KafkaEventAdapterConstants.ADAPTOR_SUSCRIBER_ZOOKEEPER_CONNECT, zookeeper);
    props.put(KafkaEventAdapterConstants.ADAPTOR_SUSCRIBER_GROUP_ID, groupId);
    if (optionalConfigs != null) {
      // String.split never returns null and always yields at least one element,
      // so the previous null/length guard on the result was redundant.
      for (String header : optionalConfigs.split(",")) {
        // Split on the first ':' only, so property values may themselves contain ':'.
        String[] configPropertyWithValue = header.split(":", 2);
        if (configPropertyWithValue.length == 2) {
          props.put(configPropertyWithValue[0], configPropertyWithValue[1]);
        } else {
          log.warn(
              "Optional configuration property not defined in the correct format.\nRequired - property_name1:property_value1,property_name2:property_value2\nFound - "
                  + optionalConfigs);
        }
      }
    }
    return new ConsumerConfig(props);
  } catch (NoClassDefFoundError e) {
    throw new InputEventAdapterRuntimeException(
        "Cannot access kafka context due to missing jars", e);
  }
}
/**
 * Reads the POST contents of the request and parses it into an Annotation object, ready to be
 * annotated. This method can also read a serialized document, if the input format is set to be
 * serialized.
 *
 * @param props The properties we are annotating with. This is where the input format is retrieved
 *     from.
 * @param httpExchange The exchange we are reading POST data from.
 * @return An Annotation representing the read document.
 * @throws IOException Thrown if we cannot read the POST data.
 * @throws ClassNotFoundException Thrown if we cannot load the serializer.
 */
private Annotation getDocument(Properties props, HttpExchange httpExchange)
    throws IOException, ClassNotFoundException {
  String inputFormat = props.getProperty("inputFormat", "text");
  switch (inputFormat) {
    case "text":
      // Decode the request body as UTF-8 explicitly; the no-charset InputStreamReader
      // constructor uses the platform default, which need not match what HTTP clients send.
      return new Annotation(
          IOUtils.slurpReader(new InputStreamReader(httpExchange.getRequestBody(), "UTF-8")));
    case "serialized":
      String inputSerializerName =
          props.getProperty("inputSerializer", ProtobufAnnotationSerializer.class.getName());
      AnnotationSerializer serializer = MetaClass.create(inputSerializerName).createInstance();
      Pair<Annotation, InputStream> pair = serializer.read(httpExchange.getRequestBody());
      return pair.first;
    default:
      throw new IOException("Could not parse input format: " + inputFormat);
  }
}
/** * Parse the parameters of a connection into a CoreNLP properties file that can be passed into * {@link StanfordCoreNLP}, and used in the I/O stages. * * @param httpExchange The http exchange; effectively, the request information. * @return A {@link Properties} object corresponding to a combination of default and passed * properties. * @throws UnsupportedEncodingException Thrown if we could not decode the key/value pairs with * UTF-8. */ private Properties getProperties(HttpExchange httpExchange) throws UnsupportedEncodingException { // Load the default properties Properties props = new Properties(); defaultProps .entrySet() .stream() .forEach( entry -> props.setProperty(entry.getKey().toString(), entry.getValue().toString())); // Try to get more properties from query string. Map<String, String> urlParams = getURLParams(httpExchange.getRequestURI()); if (urlParams.containsKey("properties")) { StringUtils.decodeMap(URLDecoder.decode(urlParams.get("properties"), "UTF-8")) .entrySet() .forEach(entry -> props.setProperty(entry.getKey(), entry.getValue())); } else if (urlParams.containsKey("props")) { StringUtils.decodeMap(URLDecoder.decode(urlParams.get("properties"), "UTF-8")) .entrySet() .forEach(entry -> props.setProperty(entry.getKey(), entry.getValue())); } // Make sure the properties compile props.setProperty( "annotators", StanfordCoreNLP.ensurePrerequisiteAnnotators( props.getProperty("annotators").split("[, \t]+"))); return props; }
public static void main(String[] args) throws Exception { Properties configFile = new Properties(); configFile.load(new FileInputStream("my_config.properties")); myConnString = configFile.getProperty("MYCONN"); try { mysqlConn = DriverManager.getConnection(myConnString); myStm = mysqlConn.createStatement(); myStm.executeQuery("set wait_timeout = 7200"); } catch (Exception e) { System.out.println("MySQL Offline."); System.exit(1); } getBlogs(); Thread.sleep(1000); // For cleaning mongo cursos myStm.close(); }
/**
 * Assembles the broker settings used to boot the embedded Kafka server: ZooKeeper address,
 * broker identity, listen host/port, a temporary log directory, and per-message flushing.
 */
private Properties getServerProperties() {
  Properties brokerConfig = new Properties();
  brokerConfig.put("zookeeper.connect", zkServer.getConnectString());
  brokerConfig.put("broker.id", "1");
  brokerConfig.put("host.name", "localhost");
  brokerConfig.put("port", String.valueOf(kafkaPort));
  brokerConfig.put("log.dir", FileUtils.createTmpDir("embedded-kafka"));
  // Flush after every message so tests observe writes immediately.
  brokerConfig.put("log.flush.interval.messages", "1");
  return brokerConfig;
}
/**
 * Resolves the dependency list for every registered plugin type and records it in
 * {@code dependencies}. A world-specific property ("multiverse.plugin_dep.&lt;world&gt;.&lt;type&gt;")
 * overrides the global one; a missing property leaves the plugin unrecorded, and an
 * empty value records a null entry (no dependencies).
 */
public void prepareDependencies(Properties properties, String worldName) {
  for (PluginSpec plugin : plugins.values()) {
    // Prefer the per-world setting, falling back to the global one.
    String raw =
        properties.getProperty("multiverse.plugin_dep." + worldName + "." + plugin.pluginType);
    if (raw == null) {
      raw = properties.getProperty("multiverse.plugin_dep." + plugin.pluginType);
    }
    if (raw == null) {
      continue;
    }
    String trimmed = raw.trim();
    String[] depList = trimmed.equals("") ? null : trimmed.split(",");
    dependencies.put(plugin.pluginType, depList);
    if (Log.loggingDebug) {
      Log.debug(
          "plugin type "
              + plugin.pluginType
              + " depends on plugin types: "
              + ((depList == null) ? "*none*" : trimmed));
    }
  }
}
public StanfordCoreNLPServer(int port) throws IOException { serverPort = port; defaultProps = new Properties(); defaultProps.setProperty( "annotators", "tokenize, ssplit, pos, lemma, ner, parse, depparse, natlog, openie, dcoref"); defaultProps.setProperty("inputFormat", "text"); defaultProps.setProperty("outputFormat", "json"); // Generate and write a shutdown key String tmpDir = System.getProperty("java.io.tmpdir"); File tmpFile = new File(tmpDir + File.separator + "corenlp.shutdown"); tmpFile.deleteOnExit(); if (tmpFile.exists()) { if (!tmpFile.delete()) { throw new IllegalStateException("Could not delete shutdown key file"); } } this.shutdownKey = new BigInteger(130, new Random()).toString(32); IOUtils.writeStringToFile(shutdownKey, tmpFile.getPath(), "utf-8"); // Set the static page handler this.staticPageHandle = new FileHandler("edu/stanford/nlp/pipeline/demo/corenlp-brat.html"); }
/** * Create a property mapping based on the initial values in the deployment descriptor. * * @param dd * @return */ public static Map<QName, Node> calcInitialProperties( Properties properties, TDeployment.Process dd) { HashMap<QName, Node> ret = new HashMap<QName, Node>(); for (Object key1 : properties.keySet()) { String key = (String) key1; Document doc = DOMUtils.newDocument(); doc.appendChild(doc.createElementNS(null, "temporary-simple-type-wrapper")); doc.getDocumentElement().appendChild(doc.createTextNode(properties.getProperty(key))); ret.put(new QName(key), doc.getDocumentElement()); } for (TDeployment.Process.Property property : dd.getPropertyArray()) { Element elmtContent = DOMUtils.getElementContent(property.getDomNode()); if (elmtContent != null) { // We'll need DOM Level 3 Document doc = DOMUtils.newDocument(); doc.appendChild(doc.importNode(elmtContent, true)); ret.put(property.getName(), doc.getDocumentElement()); } else ret.put(property.getName(), property.getDomNode().getFirstChild()); } return ret; }
/**
 * Builds a {@link VariableResolverImpl} pre-populated with the indexer namespace so that
 * templated queries can resolve importer variables: last-index timestamps (overall and
 * per entity, defaulting to the epoch when unset), the index start time, the raw request
 * parameters, and the helper-function namespace.
 *
 * @return a resolver registered under both the short and long importer namespaces.
 */
public VariableResolverImpl getVariableResolver() {
  try {
    VariableResolverImpl resolver = null;
    if (dataImporter != null && dataImporter.getCore() != null) {
      // Running inside a Solr core: seed the resolver with the core's own properties.
      resolver =
          new VariableResolverImpl(
              dataImporter.getCore().getResourceLoader().getCoreProperties());
    } else resolver = new VariableResolverImpl();
    Map<String, Object> indexerNamespace = new HashMap<String, Object>();
    if (persistedProperties.getProperty(LAST_INDEX_TIME) != null) {
      indexerNamespace.put(LAST_INDEX_TIME, persistedProperties.getProperty(LAST_INDEX_TIME));
    } else {
      // set epoch
      indexerNamespace.put(LAST_INDEX_TIME, DataImporter.DATE_TIME_FORMAT.get().format(EPOCH));
    }
    indexerNamespace.put(INDEX_START_TIME, dataImporter.getIndexStartTime());
    indexerNamespace.put("request", requestParameters.requestParams);
    indexerNamespace.put("functions", functionsNamespace);
    // Per-entity last-index timestamps, again defaulting to the epoch when unknown.
    for (DataConfig.Entity entity : dataImporter.getConfig().document.entities) {
      String key = entity.name + "." + SolrWriter.LAST_INDEX_KEY;
      String lastIndex = persistedProperties.getProperty(key);
      if (lastIndex != null) {
        indexerNamespace.put(key, lastIndex);
      } else {
        indexerNamespace.put(key, DataImporter.DATE_TIME_FORMAT.get().format(EPOCH));
      }
    }
    resolver.addNamespace(DataConfig.IMPORTER_NS_SHORT, indexerNamespace);
    resolver.addNamespace(DataConfig.IMPORTER_NS, indexerNamespace);
    return resolver;
  } catch (Exception e) {
    wrapAndThrow(SEVERE, e);
    // unreachable statement
    return null;
  }
}
/**
 * Get the response data type to send to the client, based off of the output format requested
 * from CoreNLP.
 *
 * @param props The properties being used by CoreNLP.
 * @param of The output format being output by CoreNLP.
 * @return An identifier for the type of the HTTP response (e.g., 'text/json').
 */
public String getContentType(Properties props, StanfordCoreNLP.OutputFormat of) {
  if (of == StanfordCoreNLP.OutputFormat.JSON) {
    return "text/json";
  }
  if (of == StanfordCoreNLP.OutputFormat.TEXT || of == StanfordCoreNLP.OutputFormat.CONLL) {
    return "text/plain";
  }
  if (of == StanfordCoreNLP.OutputFormat.XML) {
    return "text/xml";
  }
  if (of == StanfordCoreNLP.OutputFormat.SERIALIZED) {
    // Only the protobuf serializer gets its own content type; any other serializer
    // falls through to the generic binary type below.
    String outputSerializerName = props.getProperty("outputSerializer");
    if (ProtobufAnnotationSerializer.class.getName().equals(outputSerializerName)) {
      return "application/x-protobuf";
    }
  }
  return "application/octet-stream";
}
/**
 * Loads test configuration.
 *
 * <p>Reads node counts, thread/cancel/delay settings and job parameters from
 * {@code TEST_CONFIGURATION_FILE} into the corresponding fields.
 *
 * @throws Exception if configuration is unavailable or broken.
 */
private void loadTestConfiguration() throws Exception {
  assert TEST_CONFIGURATION_FILE.isFile();

  Properties p = new Properties();

  // try-with-resources guarantees the stream is closed even if load() throws,
  // replacing the manual close-in-finally idiom.
  try (InputStream in = new FileInputStream(TEST_CONFIGURATION_FILE)) {
    p.load(in);
  }

  clientNodes = Integer.parseInt(p.getProperty("client.nodes.count"));
  srvNodes = Integer.parseInt(p.getProperty("server.nodes.count"));
  threadsPerClient = Integer.parseInt(p.getProperty("threads.per.client"));
  cancelRate = Integer.parseInt(p.getProperty("cancel.rate"));
  submitDelay = Long.parseLong(p.getProperty("submit.delay"));

  taskParams =
      new GridJobLoadTestParams(
          Integer.parseInt(p.getProperty("jobs.count")),
          Integer.parseInt(p.getProperty("jobs.test.duration")),
          Integer.parseInt(p.getProperty("jobs.test.completion.delay")),
          Double.parseDouble(p.getProperty("jobs.failure.probability")));
}
@Override public void handle(HttpExchange httpExchange) throws IOException { // Set common response headers httpExchange.getResponseHeaders().add("Access-Control-Allow-Origin", "*"); // Get sentence. Properties props; Annotation ann; StanfordCoreNLP.OutputFormat of; log("[" + httpExchange.getRemoteAddress() + "] Received message"); try { props = getProperties(httpExchange); ann = getDocument(props, httpExchange); of = StanfordCoreNLP.OutputFormat.valueOf( props.getProperty("outputFormat", "json").toUpperCase()); // Handle direct browser connections (i.e., not a POST request). if (ann.get(CoreAnnotations.TextAnnotation.class).length() == 0) { log("[" + httpExchange.getRemoteAddress() + "] Interactive connection"); staticPageHandle.handle(httpExchange); return; } log("[" + httpExchange.getRemoteAddress() + "] API call"); } catch (Exception e) { // Return error message. e.printStackTrace(); String response = e.getMessage(); httpExchange.getResponseHeaders().add("Content-Type", "text/plain"); httpExchange.sendResponseHeaders(HTTP_BAD_INPUT, response.length()); httpExchange.getResponseBody().write(response.getBytes()); httpExchange.close(); return; } try { // Annotate StanfordCoreNLP pipeline = mkStanfordCoreNLP(props); Future<Annotation> completedAnnotationFuture = corenlpExecutor.submit( () -> { pipeline.annotate(ann); return ann; }); Annotation completedAnnotation = completedAnnotationFuture.get(5, TimeUnit.SECONDS); // Get output ByteArrayOutputStream os = new ByteArrayOutputStream(); StanfordCoreNLP.createOutputter(props, AnnotationOutputter.getOptions(pipeline)) .accept(completedAnnotation, os); os.close(); byte[] response = os.toByteArray(); httpExchange.getResponseHeaders().add("Content-Type", getContentType(props, of)); httpExchange.getResponseHeaders().add("Content-Length", Integer.toString(response.length)); httpExchange.sendResponseHeaders(HTTP_OK, response.length); httpExchange.getResponseBody().write(response); httpExchange.close(); } catch 
(TimeoutException e) { respondError("CoreNLP request timed out", httpExchange); } catch (Exception e) { // Return error message. respondError(e.getClass().getName() + ": " + e.getMessage(), httpExchange); } }
/**
 * Runs a full or delta import over every configured root entity: publishes live status
 * messages, fires the onImportStart/onImportEnd listeners, performs pre-/post-import
 * delete queries, and commits or rolls back depending on how the run ended.
 */
@SuppressWarnings("unchecked")
public void execute() {
  dataImporter.store(DataImporter.STATUS_MSGS, statusMessages);
  document = dataImporter.getConfig().document;
  final AtomicLong startTime = new AtomicLong(System.currentTimeMillis());
  // Live status entry: renders the elapsed time whenever the status page stringifies it.
  statusMessages.put(
      TIME_ELAPSED,
      new Object() {
        public String toString() {
          return getTimeElapsedSince(startTime.get());
        }
      });
  statusMessages.put(DataImporter.MSG.TOTAL_QUERIES_EXECUTED, importStatistics.queryCount);
  statusMessages.put(DataImporter.MSG.TOTAL_ROWS_EXECUTED, importStatistics.rowsCount);
  statusMessages.put(DataImporter.MSG.TOTAL_DOC_PROCESSED, importStatistics.docCount);
  statusMessages.put(DataImporter.MSG.TOTAL_DOCS_SKIPPED, importStatistics.skipDocCount);
  List<String> entities = requestParameters.entities;
  // Trigger onImportStart
  if (document.onImportStart != null) {
    invokeEventListener(document.onImportStart);
  }
  AtomicBoolean fullCleanDone = new AtomicBoolean(false);
  // we must not do a delete of *:* multiple times if there are multiple root entities to be run
  Properties lastIndexTimeProps = new Properties();
  lastIndexTimeProps.setProperty(
      LAST_INDEX_KEY,
      DataImporter.DATE_TIME_FORMAT.get().format(dataImporter.getIndexStartTime()));
  for (DataConfig.Entity e : document.entities) {
    // Honor the entity filter from the request, if one was given.
    if (entities != null && !entities.contains(e.name)) continue;
    lastIndexTimeProps.setProperty(
        e.name + "." + LAST_INDEX_KEY, DataImporter.DATE_TIME_FORMAT.get().format(new Date()));
    root = e;
    String delQuery = e.allAttributes.get("preImportDeleteQuery");
    if (dataImporter.getStatus() == DataImporter.Status.RUNNING_DELTA_DUMP) {
      cleanByQuery(delQuery, fullCleanDone);
      doDelta();
      delQuery = e.allAttributes.get("postImportDeleteQuery");
      if (delQuery != null) {
        // Post-import deletes always run, so reset the once-only guard first.
        fullCleanDone.set(false);
        cleanByQuery(delQuery, fullCleanDone);
      }
    } else {
      cleanByQuery(delQuery, fullCleanDone);
      doFullDump();
      delQuery = e.allAttributes.get("postImportDeleteQuery");
      if (delQuery != null) {
        fullCleanDone.set(false);
        cleanByQuery(delQuery, fullCleanDone);
      }
    }
    statusMessages.remove(DataImporter.MSG.TOTAL_DOC_PROCESSED);
  }
  if (stop.get()) {
    // Dont commit if aborted using command=abort
    statusMessages.put("Aborted", DataImporter.DATE_TIME_FORMAT.get().format(new Date()));
    rollback();
  } else {
    // Do not commit unnecessarily if this is a delta-import and no documents were created or
    // deleted
    if (!requestParameters.clean) {
      if (importStatistics.docCount.get() > 0 || importStatistics.deletedDocCount.get() > 0) {
        finish(lastIndexTimeProps);
      }
    } else {
      // Finished operation normally, commit now
      finish(lastIndexTimeProps);
    }
    if (writer != null) {
      writer.finish();
    }
    if (document.onImportEnd != null) {
      invokeEventListener(document.onImportEnd);
    }
  }
  statusMessages.remove(TIME_ELAPSED);
  statusMessages.put(DataImporter.MSG.TOTAL_DOC_PROCESSED, "" + importStatistics.docCount.get());
  if (importStatistics.failedDocCount.get() > 0)
    statusMessages.put(
        DataImporter.MSG.TOTAL_FAILED_DOCS, "" + importStatistics.failedDocCount.get());
  statusMessages.put("Time taken ", getTimeElapsedSince(startTime.get()));
  LOG.info("Time taken = " + getTimeElapsedSince(startTime.get()));
}
/**
 * Domain server entry point: parses system properties and command-line options (agent
 * names, plugin start specs, message-server port), starts the DomainServer, kicks off
 * plugin startup under a watchdog timeout, generates the domain security key, and then
 * idles forever once the world is reported available.
 */
public static void main(String args[]) {
  String worldName = System.getProperty("multiverse.worldname");
  Properties properties = InitLogAndPid.initLogAndPid(args, worldName, null);
  System.err.println("Multiverse server version " + ServerVersion.getVersionString());
  List<String> agentNames = new LinkedList<String>();
  LongOpt[] longopts = new LongOpt[2];
  longopts[0] = new LongOpt("pid", LongOpt.REQUIRED_ARGUMENT, null, 2);
  longopts[1] = new LongOpt("port", LongOpt.REQUIRED_ARGUMENT, null, 3);
  Getopt opt = new Getopt("DomainServer", args, "a:m:t:p:P:", longopts);
  int c;
  int port = DEFAULT_PORT;
  // The property supplies the default port; a --port option below can override it.
  String portStr = properties.getProperty("multiverse.msgsvr_port");
  if (portStr != null) port = Integer.parseInt(portStr);
  PluginStartGroup pluginStartGroup = new PluginStartGroup();
  while ((c = opt.getopt()) != -1) {
    switch (c) {
      case 'a':
        agentNames.add(opt.getOptarg());
        break;
      case 't':
      case 'm':
        // ignore RuntimeMarshalling flags
        opt.getOptarg();
        break;
      case 'p':
        // Plugin spec has the form "<pluginType>,<expectedCount>".
        String pluginSpec = opt.getOptarg();
        String[] pluginDef = pluginSpec.split(",", 2);
        if (pluginDef.length != 2) {
          System.err.println("Invalid plugin spec format: " + pluginSpec);
          Log.error("Invalid plugin spec format: " + pluginSpec);
          System.exit(1);
        }
        int expected = Integer.parseInt(pluginDef[1]);
        pluginStartGroup.add(pluginDef[0], expected);
        break;
      case '?':
        System.exit(1);
        break;
      case 'P':
        break;
      case 2:
        // ignore --pid
        opt.getOptarg();
        break;
      // port
      case 3:
        String arg = opt.getOptarg();
        port = Integer.parseInt(arg);
        break;
      default:
        break;
    }
  }
  String svrName = System.getProperty("multiverse.loggername");
  String runDir = System.getProperty("multiverse.rundir");
  // Windows non-Cygwin only - save process ID for status script
  if (System.getProperty("os.name").contains("Windows") && svrName != null && runDir != null) {
    saveProcessID(svrName, runDir);
  }
  // parse command-line options
  domainServer = new DomainServer(port);
  domainServer.setAgentNames(agentNames);
  domainServer.setWorldName(worldName);
  domainServer.start();
  pluginStartGroup.prepareDependencies(properties, worldName);
  domainServer.addPluginStartGroup(pluginStartGroup);
  pluginStartGroup.pluginAvailable("Domain", "Domain");
  String timeoutStr = properties.getProperty("multiverse.startup_timeout");
  int timeout = 120;
  if (timeoutStr != null) {
    timeout = Integer.parseInt(timeoutStr);
  }
  // Watchdog: aborts startup if the Domain dependency does not come up within `timeout`s.
  ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
  ScheduledFuture<?> timeoutHandler =
      scheduler.schedule(new TimeoutRunnable(timeout), timeout, TimeUnit.SECONDS);
  javax.crypto.SecretKey domainKey = SecureTokenUtil.generateDomainKey();
  // XXX Use a random keyID for now. Ideally, this would be semi-unique.
  long keyId = new Random().nextLong();
  encodedDomainKey = Base64.encodeBytes(SecureTokenUtil.encodeDomainKey(keyId, domainKey));
  Log.debug("generated domain key: " + encodedDomainKey);
  try {
    pluginStartGroup.awaitDependency("Domain");
    timeoutHandler.cancel(false);
    String availableMessage = properties.getProperty("multiverse.world_available_message");
    String availableFile = properties.getProperty("multiverse.world_available_file");
    if (availableFile != null) touchFile(FileUtil.expandFileName(availableFile));
    if (availableMessage != null) System.err.println("\n" + availableMessage);
    // Keep the JVM alive; all real work happens on server/plugin threads.
    while (true) {
      Thread.sleep(10000000);
    }
  } catch (Exception ex) {
    Log.exception("DomainServer.main", ex);
  }
}
/**
 * Loads data for the selected modules/tables: reads SQL templates from {@code queryFile},
 * filters them by module and table name, resolves [:param] placeholders, computes a page
 * count for paginated sources, then submits one SourceReader/DestinationWriter Controller
 * per key to a completion service and blocks until every task finishes.
 *
 * @param queryFile path to a properties file of load queries, keyed module.table.
 * @param modules comma-separated module names, or Config.ALL for no module filter.
 * @param tables comma-separated table names, or Config.ALL for no table filter.
 */
public void load(String queryFile, String modules, String tables)
    throws SQLException, IOException, InterruptedException, ExecutionException {
  Properties properties = new Properties();
  // NOTE(review): this FileInputStream is never closed — file-handle leak; confirm and fix.
  properties.load(new FileInputStream(queryFile));
  Collection<String> keys = properties.stringPropertyNames();
  // Filtering by validating if property starts with any of the module names
  if (!Config.ALL.equalsIgnoreCase(modules)) {
    keys =
        Util.filter(
            keys, "^(" + modules.replaceAll(Config.COMMA_SEPARATOR, Config.MODULE_SUFFIX) + ")");
  }
  // Filtering by table names
  if (!Config.ALL.equalsIgnoreCase(tables)) {
    keys =
        Util.filter(
            keys, "(" + tables.replaceAll(Config.COMMA_SEPARATOR, Config.TABLE_SUFFIX) + ")$");
  }
  logger.info("The final modules and tables that are being considered" + keys.toString());
  // Three threads per key — presumably reader/writer/controller; TODO confirm sizing.
  ExecutorService executor = Executors.newFixedThreadPool(keys.size() * 3);
  CompletionService completion = new ExecutorCompletionService(executor);
  for (String key : keys) {
    String query = properties.getProperty(key);
    // Strip the leading "module." prefix so the key matches the destination table name.
    key =
        (key.contains(Config.DOT_SEPARATOR)
            ? key.substring(key.indexOf(Config.DOT_SEPARATOR) + 1)
            : key);
    // Substitute every [:param] placeholder with the value of the same-named property.
    while (query.contains("[:")) {
      String param = query.substring(query.indexOf("[:") + 2, query.indexOf("]"));
      query = query.replaceFirst("\\[\\:" + param + "\\]", properties.getProperty(param));
    }
    int pages = 1;
    String base = "";
    if (config.srisvoltdb) {
      if (config.isPaginated) {
        try {
          // find count
          String countquery = query;
          // "<col>" marks the ordering column; strip it before running the count.
          if (countquery.contains("<") || countquery.contains(">")) {
            int bracketOpen = countquery.indexOf("<");
            int bracketClose = countquery.indexOf(">");
            String orderCol = countquery.substring(bracketOpen + 1, bracketClose);
            countquery = countquery.replace("<" + orderCol + ">", "");
          }
          VoltTable vcount = client.callProcedure("@AdHoc", countquery).getResults()[0];
          int count = vcount.getRowCount();
          pages = (int) Math.ceil((double) count / config.pageSize);
        } catch (Exception e) {
          System.out.println("Count formation failure!");
        }
      }
      // set up data in order
    } else {
      // find count
      String countquery = query.replace("*", "COUNT(*)");
      // NOTE(review): conn, jdbcStmt and rcount are never closed — JDBC resource leak.
      Connection conn =
          DriverManager.getConnection(config.jdbcurl, config.jdbcuser, config.jdbcpassword);
      base = conn.getMetaData().getDatabaseProductName().toLowerCase();
      System.out.println("BASE: " + base);
      Statement jdbcStmt =
          conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
      if (countquery.contains("<") || countquery.contains(">")) {
        int bracketOpen = countquery.indexOf("<");
        int bracketClose = countquery.indexOf(">");
        String orderCol = countquery.substring(bracketOpen + 1, bracketClose);
        countquery = countquery.replace("<" + orderCol + ">", "");
      }
      ResultSet rcount = jdbcStmt.executeQuery(countquery);
      rcount.next();
      // NOTE(review): getArray(1) on a COUNT(*) column looks wrong — getInt(1) seems
      // intended; verify against the drivers in use before changing.
      int count = Integer.parseInt(rcount.getArray(1).toString());
      // THIS IF NEEDS A WAY TO DETERMINE IF POSTGRES
      if (base.contains("postgres") && config.isPaginated) {
        pages = (int) Math.ceil((double) count / config.pageSize);
      }
      // set up data in order
    }
    // establish new SourceReaders and DestinationWriters for pages
    SourceReader[] sr = new SourceReader[pages];
    DestinationWriter[] cr = new DestinationWriter[pages];
    for (int i = 0; i < pages; i++) {
      sr[i] = new SourceReader();
      cr[i] = new DestinationWriter();
    }
    Controller processor =
        new Controller<ArrayList<Object[]>>(
            client, sr, cr, query, key.toUpperCase() + ".insert", config, pages, base);
    completion.submit(processor);
  }
  // wait for all tasks to complete.
  for (int i = 0; i < keys.size(); ++i) {
    logger.info(
        "****************" + completion.take().get() + " completed *****************");
    // will block until the next sub task has
    // completed.
  }
  executor.shutdown();
}
/**
 * Initializes store.
 *
 * <p>Builds the Hibernate SessionFactory exactly once (guarded by {@code initGuard}):
 * the configured path is tried first as a URL, then as a file, then as a classpath
 * resource; when no path is set, {@code hibernateProps} (or hard-coded defaults) are
 * used instead. Concurrent callers block on {@code initLatch} until the first caller
 * finishes, then all paths verify {@code sesFactory} was actually created.
 *
 * @throws GridException If failed to initialize.
 */
private void init() throws GridException {
  // Only the first caller performs initialization; everyone else waits on the latch.
  if (initGuard.compareAndSet(false, true)) {
    if (log.isDebugEnabled()) log.debug("Initializing cache store.");
    try {
      if (sesFactory != null)
        // Session factory has been provided - nothing to do.
        return;
      if (!F.isEmpty(hibernateCfgPath)) {
        try {
          URL url = new URL(hibernateCfgPath);
          sesFactory = new Configuration().configure(url).buildSessionFactory();
          if (log.isDebugEnabled()) log.debug("Configured session factory using URL: " + url);
          // Session factory has been successfully initialized.
          return;
        } catch (MalformedURLException e) {
          if (log.isDebugEnabled())
            log.debug("Caught malformed URL exception: " + e.getMessage());
        }
        // Provided path is not a valid URL. File?
        File cfgFile = new File(hibernateCfgPath);
        if (cfgFile.exists()) {
          sesFactory = new Configuration().configure(cfgFile).buildSessionFactory();
          if (log.isDebugEnabled())
            log.debug("Configured session factory using file: " + hibernateCfgPath);
          // Session factory has been successfully initialized.
          return;
        }
        // Provided path is not a file. Classpath resource?
        sesFactory = new Configuration().configure(hibernateCfgPath).buildSessionFactory();
        if (log.isDebugEnabled())
          log.debug("Configured session factory using classpath resource: " + hibernateCfgPath);
      } else {
        if (hibernateProps == null) {
          U.warn(
              log, "No Hibernate configuration has been provided for store (will use default).");
          hibernateProps = new Properties();
          hibernateProps.setProperty("hibernate.connection.url", DFLT_CONN_URL);
          hibernateProps.setProperty("hibernate.show_sql", DFLT_SHOW_SQL);
          hibernateProps.setProperty("hibernate.hbm2ddl.auto", DFLT_HBM2DDL_AUTO);
        }
        Configuration cfg = new Configuration();
        cfg.setProperties(hibernateProps);
        assert resourceAvailable(MAPPING_RESOURCE);
        cfg.addResource(MAPPING_RESOURCE);
        sesFactory = cfg.buildSessionFactory();
        if (log.isDebugEnabled())
          log.debug("Configured session factory using properties: " + hibernateProps);
      }
    } catch (HibernateException e) {
      throw new GridException("Failed to initialize store.", e);
    } finally {
      // Release waiters regardless of outcome; they re-check sesFactory below.
      initLatch.countDown();
    }
  } else if (initLatch.getCount() > 0) U.await(initLatch);

  if (sesFactory == null) throw new GridException("Cache store was not properly initialized.");
}
/**
 * Load client configuration from the properties map.
 *
 * <p>Reads every "&lt;prefix&gt;.*" property, applies the simple scalar settings
 * (balancer, timeouts, protocol, servers, credentials), then the optional SSL section,
 * then the named data configurations listed in "&lt;prefix&gt;.data.configurations".
 *
 * @param prefix Prefix for the client properties.
 * @param in Properties map to load configuration from.
 * @throws GridClientException If parsing configuration failed.
 */
public void load(String prefix, Properties in) throws GridClientException {
  // Normalize: strip any trailing dots, then re-append exactly one when non-empty.
  while (prefix.endsWith(".")) prefix = prefix.substring(0, prefix.length() - 1);

  if (!prefix.isEmpty()) prefix += ".";

  String balancer = in.getProperty(prefix + "balancer");
  String connectTimeout = in.getProperty(prefix + "connectTimeout");
  String cred = in.getProperty(prefix + "credentials");
  String autoFetchMetrics = in.getProperty(prefix + "autoFetchMetrics");
  String autoFetchAttrs = in.getProperty(prefix + "autoFetchAttributes");
  String maxConnIdleTime = in.getProperty(prefix + "idleTimeout");
  String proto = in.getProperty(prefix + "protocol");
  String srvrs = in.getProperty(prefix + "servers");
  String tcpNoDelay = in.getProperty(prefix + "tcp.noDelay");
  String topRefreshFreq = in.getProperty(prefix + "topology.refresh");

  String sslEnabled = in.getProperty(prefix + "ssl.enabled");

  String sslProto = in.getProperty(prefix + "ssl.protocol", "TLS");
  String sslKeyAlg = in.getProperty(prefix + "ssl.key.algorithm", "SunX509");

  String keyStorePath = in.getProperty(prefix + "ssl.keystore.location");
  String keyStorePwd = in.getProperty(prefix + "ssl.keystore.password");
  String keyStoreType = in.getProperty(prefix + "ssl.keystore.type");

  String trustStorePath = in.getProperty(prefix + "ssl.truststore.location");
  String trustStorePwd = in.getProperty(prefix + "ssl.truststore.password");
  String trustStoreType = in.getProperty(prefix + "ssl.truststore.type");

  String dataCfgs = in.getProperty(prefix + "data.configurations");

  setBalancer(resolveBalancer(balancer));

  if (!F.isEmpty(connectTimeout)) setConnectTimeout(Integer.parseInt(connectTimeout));

  if (!F.isEmpty(cred)) {
    // Credentials are "login:password"; a value with no usable separator is treated
    // as a single opaque token instead.
    int idx = cred.indexOf(':');

    if (idx >= 0 && idx < cred.length() - 1) {
      setSecurityCredentialsProvider(
          new SecurityCredentialsBasicProvider(
              new SecurityCredentials(cred.substring(0, idx), cred.substring(idx + 1))));
    } else {
      setSecurityCredentialsProvider(
          new SecurityCredentialsBasicProvider(new SecurityCredentials(null, null, cred)));
    }
  }

  if (!F.isEmpty(autoFetchMetrics)) setAutoFetchMetrics(Boolean.parseBoolean(autoFetchMetrics));

  if (!F.isEmpty(autoFetchAttrs)) setAutoFetchAttributes(Boolean.parseBoolean(autoFetchAttrs));

  if (!F.isEmpty(maxConnIdleTime)) setMaxConnectionIdleTime(Integer.parseInt(maxConnIdleTime));

  if (!F.isEmpty(proto)) setProtocol(GridClientProtocol.valueOf(proto));

  if (!F.isEmpty(srvrs)) setServers(Arrays.asList(srvrs.replaceAll("\\s+", "").split(",")));

  if (!F.isEmpty(tcpNoDelay)) setTcpNoDelay(Boolean.parseBoolean(tcpNoDelay));

  if (!F.isEmpty(topRefreshFreq)) setTopologyRefreshFrequency(Long.parseLong(topRefreshFreq));

  //
  // SSL configuration section
  //

  if (!F.isEmpty(sslEnabled) && Boolean.parseBoolean(sslEnabled)) {
    GridSslBasicContextFactory factory = new GridSslBasicContextFactory();

    factory.setProtocol(F.isEmpty(sslProto) ? "TLS" : sslProto);
    factory.setKeyAlgorithm(F.isEmpty(sslKeyAlg) ? "SunX509" : sslKeyAlg);

    // A key store is mandatory when SSL is on; a trust store is optional and its
    // absence disables peer trust checking entirely.
    if (F.isEmpty(keyStorePath))
      throw new IllegalArgumentException("SSL key store location is not specified.");

    factory.setKeyStoreFilePath(keyStorePath);

    if (keyStorePwd != null) factory.setKeyStorePassword(keyStorePwd.toCharArray());

    factory.setKeyStoreType(F.isEmpty(keyStoreType) ? "jks" : keyStoreType);

    if (F.isEmpty(trustStorePath))
      factory.setTrustManagers(GridSslBasicContextFactory.getDisabledTrustManager());
    else {
      factory.setTrustStoreFilePath(trustStorePath);

      if (trustStorePwd != null) factory.setTrustStorePassword(trustStorePwd.toCharArray());

      factory.setTrustStoreType(F.isEmpty(trustStoreType) ? "jks" : trustStoreType);
    }

    setSslContextFactory(factory);
  }

  //
  // Data configuration section
  //

  if (!F.isEmpty(dataCfgs)) {
    String[] names = dataCfgs.replaceAll("\\s+", "").split(",");

    Collection<GridClientDataConfiguration> list = new ArrayList<>();

    for (String cfgName : names) {
      if (F.isEmpty(cfgName)) continue;

      String name = in.getProperty(prefix + "data." + cfgName + ".name");
      String bal = in.getProperty(prefix + "data." + cfgName + ".balancer");
      String aff = in.getProperty(prefix + "data." + cfgName + ".affinity");

      GridClientDataConfiguration dataCfg = new GridClientDataConfiguration();

      dataCfg.setName(F.isEmpty(name) ? null : name);
      dataCfg.setBalancer(resolveBalancer(bal));
      dataCfg.setAffinity(resolveAffinity(aff));

      list.add(dataCfg);
    }

    setDataConfigurations(list);
  }
}