/** * This is the only method exposed as the API call loads multiple configuration files from * specified URI an invokes a background {@link SearchAgent} callable task which in turn does the * action property collection from various streams. This is the method that is exposed as API * call. * * @params resourses String containg space separated uri where data will be extracted from. * @params attribute String containg space separated keywords to look for. * @return List class containing the loaded SearchObjectCache. */ public static List<SearchObjectCache> search(String resources, String attributes, int agent) throws InterruptedException { // "crawling data sources"; ExecutorService service = Executors.newFixedThreadPool(10); List<SearchObjectCache> records = new ArrayList<SearchObjectCache>(); java.util.List<String> sources = Arrays.asList(resources.split(" ")); List<Future<SearchObjectCache>> tasks = new ArrayList<Future<SearchObjectCache>>(); try { // for(String source:sources){ sources.stream().forEach(source -> add(service, tasks, agent, source, attributes)); for (Future<SearchObjectCache> task : tasks) { records.addAll((List<SearchObjectCache>) task.get()); } service.shutdown(); service.awaitTermination(5, TimeUnit.SECONDS); } catch (Exception ex) { logger.info("error loading resources "); ex.printStackTrace(); logger.log(Level.SEVERE, null, ex); } finally { if (!service.isTerminated()) { logger.info("Cancel non-finish tasks"); } service.shutdownNow(); } logger.info("Task is completed, let's check result"); logger.info("Document search completed\n"); return records; }
/**
 * Stops the scheduler and removes this server's registration node from ZooKeeper.
 *
 * <p>Lists the children of {@code ZK_ROOT_PATH/server}, and deletes every node whose stored
 * record carries this host's local IP. The ZooKeeper connection is always closed, even when
 * listing or deletion fails (the original leaked it on exception).
 *
 * @return always {@code true}; failures are logged, not propagated
 */
public boolean stop() {
  scheduler.stop();
  try {
    ZKManager zkManager = new ZKManager(PropertiesUtil.loadProperties());
    try {
      String serverPathStr = CommonConstants.ZK_ROOT_PATH + "/server";
      List<String> serverNodeList = zkManager.getZooKeeper().getChildren(serverPathStr, false);
      for (int i = 0; (serverNodeList != null) && (i < serverNodeList.size()); i++) {
        String id = serverNodeList.get(i);
        String c = zkManager.getData(serverPathStr + "/" + id);
        if (c == null) {
          continue;
        }
        BasicDBObject record = (BasicDBObject) com.mongodb.util.JSON.parse(c);
        String ip = (String) record.get(CommonConstants.IP);
        // Only remove the registration node that belongs to this host.
        if (!StringUtil.isEmpty(ip) && IpUtil.getLocalIP().equals(ip)) {
          zkManager.delete(serverPathStr + "/" + id);
        }
      }
    } finally {
      // Close the connection even if listing/parsing/deleting threw above.
      zkManager.close();
    }
  } catch (Exception e) {
    // logger.error already records the stack trace; printStackTrace() was redundant.
    logger.error("ModuleSchedulerServer-->>stop() error ", e);
  }
  return true;
}
/**
 * Application entry point: obtains the {@link SearchBroker} singleton and schedules its jobs.
 *
 * @param args command-line arguments (unused)
 */
public static void main(String[] args) {
  final SearchBroker broker = SearchBroker.newInstance();
  try {
    broker.scheduleJobs();
  } catch (Exception ex) {
    // Scheduling failure is reported to the console; the process is left running.
    System.out.println("error starting jobs..");
    ex.printStackTrace();
  }
}
/**
 * Adds a job to the in-memory scheduler and persists it to MongoDB.
 *
 * <p>If the scheduler rejects the job, nothing is persisted. On any exception the failure is
 * logged and also written to the {@code TBL_CLOVER_LOG} collection.
 *
 * @param serverJob the job to register; its string form is embedded in audit-log entries
 * @return {@code true} if both the scheduler accepted the job and the MongoDB upsert
 *     succeeded; {@code false} otherwise
 */
private boolean addJob(ServerJob serverJob) {
  // Entry trace — the original logged this at ERROR level; downgraded, it is not an error.
  logger.info("addJob -> MongoDBUtil: insertOrUpdate >>>");
  try {
    if (!scheduler.add(serverJob)) {
      return false;
    }
    return MongoDBUtil.INSTANCE.insertOrUpdate(
        BuildMongoDBData.getInsertJobBasicDBObject(serverJob), DBTableInfo.TBL_CLOVER_JOB);
  } catch (Exception e) {
    // Build the shared prefix once; the persisted strings are byte-identical to the original.
    String execMethod = "ModuleSchedulerServer-->>addJob(" + serverJob.toString() + ")";
    String execResult = execMethod + " error ," + e.getMessage();
    logger.error(execMethod + " error", e);
    MongoDBUtil.INSTANCE.insert(
        BuildMongoDBData.getInsertLogBasicDBObject(
            serverJob.getJobDetail().getKey().toString(), execMethod, execResult),
        DBTableInfo.TBL_CLOVER_LOG);
    return false;
  }
}