/**
 * Blocks the calling thread for 250 ms.
 *
 * <p>If the sleep is interrupted, the wait ends early and the thread's
 * interrupt status is restored so callers can still observe the interruption
 * (the original swallowed it with {@code printStackTrace()}).
 */
private static void waitAQuarterSecond() {
  try {
    Thread.sleep(250);
  } catch (InterruptedException e) {
    // Re-assert the interrupt flag instead of swallowing the interruption.
    Thread.currentThread().interrupt();
  }
}
/**
 * Verifies that a ChildActivity handed to the browser panel via a Response
 * can subsequently create a broker (createBroker() returns non-null).
 *
 * <p>Setup runs on the JavaFX application thread via Platform.runLater; the
 * shared {@code lock} monitor guards {@code act} between the FX thread and
 * the test thread.
 *
 * <p>NOTE(review): the fixed 100 ms sleep assumes the runLater task has
 * completed by then — this is inherently racy and may be flaky on slow
 * machines; a latch would be deterministic. Confirm before relying on it.
 */
@Test
public void testCreateBroker() {
  System.out.println("testCreateBroker");
  Platform.runLater(
      new Runnable() {
        @Override
        public void run() {
          synchronized (lock) {
            // Setup: build an activity with a scene and deliver it to the
            // browser panel wrapped in an OK response.
            act = new ChildActivity();
            act.setScene(new HBox());
            Response response = new Response();
            response.setStatus(Status.OK);
            response.setActivity(act);
            browser.getBrowserPanel().receiveResponse(response);
          }
        }
      });
  // Give the FX thread time to run the task above (see NOTE on flakiness).
  try {
    Thread.sleep(100);
  } catch (InterruptedException ex) {
    ex.printStackTrace();
  }
  // Re-acquire the monitor so we read act only after the FX thread published it.
  synchronized (lock) {
    assertNotNull(act.createBroker());
  }
}
/**
 * Verifies that {@code observeOn} applies backpressure when the downstream
 * consumer is slow: a 1 ms sleep per element throttles consumption, and the
 * upstream counter must not race more than ~2 buffer sizes ahead.
 *
 * <p>Fix over the original: the interrupt inside the map lambda is
 * re-asserted instead of being swallowed with {@code printStackTrace()}.
 */
@Test
public void testObserveOnWithSlowConsumer() {
  int NUM = (int) (Observable.bufferSize() * 0.2);
  AtomicInteger c = new AtomicInteger();
  TestSubscriber<Integer> ts = new TestSubscriber<>();
  incrementingIntegers(c)
      .observeOn(Schedulers.computation())
      .map(
          i -> {
            try {
              // Simulate a slow consumer: 1 ms of "work" per element.
              Thread.sleep(1);
            } catch (InterruptedException e) {
              // Restore the interrupt status rather than swallowing it.
              Thread.currentThread().interrupt();
            }
            return i;
          })
      .take(NUM)
      .subscribe(ts);
  ts.awaitTerminalEvent();
  ts.assertNoErrors();
  System.out.println(
      "testObserveOnWithSlowConsumer => Received: " + ts.valueCount() + " Emitted: " + c.get());
  assertEquals(NUM, ts.valueCount());
  // Backpressure bound: the source must not have emitted far beyond what the
  // slow consumer actually requested.
  assertTrue(c.get() < Observable.bufferSize() * 2);
}
/**
 * Blocks until {@code isReady} becomes true, waiting on {@code readyLock}.
 *
 * <p>Interrupts received while waiting do not abort the wait (the readiness
 * condition is still honored), but — unlike the original, which swallowed
 * them — the interrupt status is re-asserted before returning so callers can
 * observe that an interrupt occurred.
 */
void waitUntilReady() {
  boolean interrupted = false;
  synchronized (readyLock) {
    while (!isReady) {
      try {
        readyLock.wait();
      } catch (InterruptedException e) {
        // Keep waiting for readiness; remember the interrupt so it can be
        // restored once the condition is satisfied.
        interrupted = true;
      }
    }
  }
  if (interrupted) {
    Thread.currentThread().interrupt();
  }
  logger.debug("Done waiting for Client ({}) to be ready.", id);
}
/**
 * Blocks until {@code isComplete} becomes true, waiting on
 * {@code completeLock}.
 *
 * <p>Interrupts received while waiting do not abort the wait (completion is
 * still honored), but — unlike the original, which swallowed them — the
 * interrupt status is re-asserted before returning so callers can observe
 * that an interrupt occurred.
 */
void waitForCompletion() {
  boolean interrupted = false;
  synchronized (completeLock) {
    while (!isComplete) {
      try {
        completeLock.wait();
      } catch (InterruptedException e) {
        // Keep waiting for completion; remember the interrupt so it can be
        // restored once the condition is satisfied.
        interrupted = true;
      }
    }
  }
  if (interrupted) {
    Thread.currentThread().interrupt();
  }
  logger.debug("Done waiting for Client ({}) to complete.", id);
}
public void test3(String fileName) throws ClassNotFoundException { if (null != dao) { System.out.println(" >> dao is not null"); // List<String[]> list = CvsFileParser.getCSV("tac.csv"); List<String[]> list = CvsFileParser.getCSV(fileName, 1000000); int offset = 0; int foffset = 0; Tac tac = null; // List<String> outL = new ArrayList<String>(); // List<Tac> tList = new ArrayList<Tac>(); HashMap ok = new HashMap(); HashMap failure = new HashMap(); HashMap common = new HashMap(); String ttac = ""; for (String[] s : list) { // Integer i=1; tac = new Tac(); // ttac = s[0]; String phoneNum = s[0]; // String uid = s[2]; String imei = s[1]; String bd = s[2]; // try{ // ttac = imei.substring(0,8); // tac.setTac(ttac); // }catch(Exception e){ // System.err.println(" >> "+imei); // continue; // } // Tac tmp = dao.findByCondition(Tac.class, Cnd.where("d_tac", "=", ttac)); // if(null == tmp){ //// failure.put(phoneNum+"_"+imei, tmp); // foffset ++; // }else{ // //// tList.add(tmp); // ok.put(ttac,tmp); //// if(null != tmp.getHsmanName()){ //// ok.put(tmp.getHsmanName()+"_"+tmp.getHstypeName(), tmp); //// }else{ //// ok.put(tmp.getHsmanNameEn()+"_"+tmp.getHstypeNameEn(), tmp); //// } // offset++; BCOUNT++; // } SB_SQL.append( "insert into tbl_cmdata (d_phonenum,d_imei,d_bistrict) values (\'" + phoneNum + "\',\'" + imei + "\',\'" + bd + "\')"); } // System.out.println(" >> 成功|offset:"+offset); // System.out.println(" >> 失败|foffset:"+foffset); // StringBuffer sb = new StringBuffer(); // System.out.println(" >> 识别的号码数量为:"+ok.size()); // System.out.println(" >> 未被识别的号码数量为:"+failure.size()); // System.out.println(" >> 总共的号码数量:"+common.size()); // int s = 0; // Iterator it = ok.keySet().iterator(); // HashMap<String,String> HSMAN_MAP = new HashMap<String,String>(); // while(it.hasNext()){ // String key = (String)it.next(); // Object obj = ok.get(key) ; // if( null != obj){ // String[] t = key.split("_"); // Tac value = (Tac)obj; // // 
sb.append(t[0]).append(",").append(t[1]).append(",").append(t[1]).append(",").append(t[1].substring(0,8)).append(",").append(value.getHsmanName()).append(",").append(value.getHsmanNameEn()).append(",").append(value.getHstypeName()).append(",").append(value.getHstypeNameEn()).append(",").append(1).append("\r\n"); // // sb.append(key).append(",").append(value.getHsmanName()).append(",").append(value.getHsmanNameEn()).append(",").append(value.getHstypeName()).append(",").append(value.getHstypeNameEn()).append(",").append(1).append("\r\n"); // // sb.append(value.getHsmanName()).append(",").append(value.getHsmanNameEn()).append(",").append(value.getHstypeName()).append(",").append(value.getHstypeNameEn()).append("\r\n"); // if(null != value.getHsmanName() && !"".equals(value.getHsmanName())){ // HSMAN_MAP.put(value.getHsmanName(), value.getHsmanName()); // }else if(null != value.getHsmanNameEn() && !"".equals(value.getHsmanNameEn())){ // HSMAN_MAP.put(value.getHsmanNameEn(), value.getHsmanNameEn()); // } // s++; // } // } /** * sb.append("\r\n"+s); IOUtil.createFile(sb.toString(), * "2011-07-11_已识别的终端数据_"+"_OK"+System.currentTimeMillis()); */ // Iterator its = HSMAN_MAP.keySet().iterator(); // while(its.hasNext()){ // String value = (String)its.next(); // sb.append(value).append("\r\n"); // } // IOUtil.createFile(sb.toString(), "厂商数据"+"_OK"+System.currentTimeMillis()); // BCOUNT += offset; try { Thread.sleep(2000); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } else { System.out.println(" >> dao is null"); } }
/*
 * Tests to make sure the caching system works.
 * Compares the results of using cached/non-cached features.
 *
 * Takes about 1 minute, depending on the authors selected and how many documents they have.
 */
/**
 * Runs the same cross-validation analysis twice on each generated problem set
 * — first with a cold cache (deleted below), then presumably warm — and
 * asserts both produce identical statistics strings. Also prints wall-clock
 * timings for each run.
 *
 * <p>NOTE(review): requires the "drexel_1" corpus on disk and writes problem
 * sets under JUNIT_RESOURCE_PACKAGE/temp; this is an environment-dependent
 * integration test, not a unit test.
 */
@Test
public void scalingCached() throws Exception {
  // TODO: Tests generally should have no randomness. We should perform these tests on a set of
  // static problem sets
  // that the test uses every time.
  int num_authors = 2;
  System.out.println(num_authors);
  File source = Paths.get(JSANConstants.JSAN_CORPORA_PREFIX, "drexel_1").toFile();
  Assert.assertTrue(
      "You don't have the specified corpora available.",
      source.exists() && source.isDirectory());
  // This can be anything. Problem set location
  File dest = Paths.get(JSANConstants.JUNIT_RESOURCE_PACKAGE, "temp", "cache_test_.xml").toFile();
  GenericProblemSetCreator.createProblemSetsWithSize(source, dest, num_authors, 10);
  File[] problem_sets = BekahUtil.listNotHiddenFiles(dest.getParentFile());
  for (File problem_set : problem_sets) {
    // Delete the cache in the beginning
    // File to cache
    deleteRecursive(new File(JSANConstants.JSAN_CACHE), false);
    // Brief pause, presumably to let the filesystem settle after the
    // recursive delete — TODO confirm this is still needed.
    try {
      Thread.sleep(20);
    } catch (InterruptedException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    }
    System.out.println(problem_set);
    ProblemSet p = new ProblemSet(problem_set.getAbsolutePath());
    Set<String> authors = p.getAuthors();
    for (String a : authors) {
      System.out.print(a + ",");
    }
    System.out.println("");
    Path path =
        Paths.get(JSANConstants.JSAN_FEATURESETS_PREFIX, "writeprints_feature_set_limited.xml");
    // First run: cache was just deleted, so features are computed fresh.
    FullAPI test =
        new FullAPI.Builder()
            .cfdPath(path.toString())
            .psPath(problem_set.getAbsolutePath())
            .setAnalyzer(
                new WekaAnalyzer(Class.forName("weka.classifiers.functions.SMO").newInstance()))
            .numThreads(4)
            .analysisType(FullAPI.analysisType.CROSS_VALIDATION)
            .build();
    long bef1 = System.currentTimeMillis();
    test.prepareInstances();
    test.run();
    ExperimentResults r1 = test.getResults();
    long aft1 = System.currentTimeMillis();
    String result1 = r1.getStatisticsString();
    System.out.println(result1);
    // Second run: identical configuration; presumably served from the cache
    // populated by the first run — TODO confirm the cache is actually hit.
    FullAPI test2 =
        new FullAPI.Builder()
            .cfdPath(path.toString())
            .psPath(problem_set.getAbsolutePath())
            .setAnalyzer(
                new WekaAnalyzer(Class.forName("weka.classifiers.functions.SMO").newInstance()))
            .numThreads(4)
            .analysisType(FullAPI.analysisType.CROSS_VALIDATION)
            .build();
    long bef2 = System.currentTimeMillis();
    test2.prepareInstances();
    test2.run();
    ExperimentResults r2 = test2.getResults();
    long aft2 = System.currentTimeMillis();
    String result2 = r2.getStatisticsString();
    System.out.println(result2);
    // Cached and non-cached feature extraction must be observationally equal.
    Assert.assertEquals("Cached results different from non-cached results", result1, result2);
    System.out.print((aft1 - bef1) / 1000 + " s\t");
    System.out.println((aft2 - bef2) / 1000 + " s");
  }
}
/**
 * Stores three hours' worth of 15-second LiveStatistics (100 distinct metric
 * names per period) and then asserts the expected number of statistics can be
 * read back for one series.
 *
 * <p>NOTE(review): values are stored under guids
 * "EurekaJAgent:Memory:Heap:Used" + i, but the read-back queries guid
 * "Test:A" — from this file alone these look like different series; confirm
 * against the DAO's behavior whether the assertion can ever see the stored
 * rows, or whether one of the two names is stale.
 */
@Test
public void testStoringMetricHour() {
  DateTime fromDate = new DateTime(2012, 04, 18, 9, 0, 0);
  DateTime toDate = new DateTime(2012, 04, 18, 12, 0, 0);
  // DateTime fromDate = new DateTime(2010, 01, 01, 0, 0, 0);
  // DateTime toDate = new DateTime(2010, 12, 31, 23, 59, 59);
  // Convert the window to 15-second bucket indices (millis / 15000).
  Long from15SecPeriod = fromDate.getMillis() / 15000;
  Long to15SecPeriod = toDate.getMillis() / 15000;
  long numMetrics = to15SecPeriod - from15SecPeriod;
  long index = 0;
  while (index <= numMetrics) {
    // 100 metric names per 15-second period, stored one at a time.
    for (int i = 0; i < 100; i++) {
      List<LiveStatistics> liveStatisticsList = new ArrayList<LiveStatistics>();
      liveStatisticsList.add(
          new TestLiveStatistics(
              "EurekaJAgent:Memory:Heap:Used" + i,
              "ACCOUNT NAME",
              from15SecPeriod + index,
              new Double(index),
              ValueType.AGGREGATE.value(),
              UnitType.N.value()));
      newEnv.getLiveStatissticsDao().storeIncomingStatistics(liveStatisticsList);
    }
    // Progress logging every 500 periods.
    if (index > 0 && index % 500 == 0) {
      logger.info(
          "stored 500 keys for: "
              + index
              + " 15 second time periods. Now at: "
              + index
              + " of "
              + numMetrics
              + " hours");
    }
    index++;
  }
  // Fixed pause, presumably to let asynchronous persistence flush before the
  // read-back below — TODO confirm this is required by the DAO.
  try {
    Thread.sleep(15000);
  } catch (InterruptedException e) {
    e.printStackTrace(); // To change body of catch statement use File | Settings | File
    // Templates.
  }
  System.out.println("Stored " + index + " values in the database");
  int expectedNumMetrics = (4 * 60 * 3) + 1; // 3 hours worth of metrics
  // expectedNumMetrics = (4 * 60 * 24 * 365); //31 days worth of metrics
  List<LiveStatistics> statList =
      newEnv
          .getLiveStatissticsDao()
          .getLiveStatistics("Test:A", "ACCOUNT NAME", from15SecPeriod, to15SecPeriod);
  Assert.assertEquals(
      "Expecting " + expectedNumMetrics + " LiveStatistcs back from DB",
      expectedNumMetrics,
      statList.size());
  // newEnv.getLiveStatissticsDao().deleteLiveStatisticsOlderThan(new DateTime(2010, 12, 14, 23,
  // 59, 59).toDate(), "ACCOUNT NAME");
  // newEnv.getLiveStatissticsDao().markLiveStatisticsAsCalculated("Test:A", "ACCOUNT NAME",
  // from15SecPeriod, to15SecPeriod);
  // newEnv.getLiveStatissticsDao().deleteMarkedLiveStatistics();
  /*statList = newEnv.getLiveStatissticsDao().getLiveStatistics("Test:A", "ACCOUNT NAME",
  from15SecPeriod, to15SecPeriod);

  for (LiveStatistics stat : statList) {
      printStat(stat);
      Assert.assertNull("Expecting that value is NULL for timeperiod: " + stat.getTimeperiod(),
  stat.getValue());
  }*/
}