@Test
public void testPutRemoveGet() {
  Map<Integer, Integer> myMap = new MyMap<>();
  Map<Integer, Integer> control = new HashMap<>();
  for (int i = 0; i < N; i++) {
    int k = random.nextInt();
    int v = random.nextInt();
    myMap.put(k, v);
    control.put(k, v);
  }
  Set<Integer> keysToRemove = new HashSet<>();
  for (int k : control.keySet()) {
    if (random.nextBoolean()) {
      keysToRemove.add(k);
    }
  }
  for (int k : keysToRemove) {
    control.remove(k);
    myMap.remove(k);
  }
  assertEquals(myMap.size(), control.size());
  for (int k : control.keySet()) {
    assertEquals(myMap.get(k), control.get(k));
    int r = random.nextInt();
    assertEquals(myMap.get(r), control.get(r));
  }
}
@Test
public void readTest() {
  String insertKey = "user0";
  Map<String, ByteIterator> insertMap = insertRow(insertKey);
  HashSet<String> readFields = new HashSet<>();
  HashMap<String, ByteIterator> readResultMap = new HashMap<>();

  // Test reading a single field
  readFields.add("FIELD0");
  orientDBClient.read(CLASS, insertKey, readFields, readResultMap);
  assertEquals(
      "Assert that result has correct number of fields",
      readFields.size(),
      readResultMap.size());
  for (String field : readFields) {
    assertEquals(
        "Assert " + field + " was read correctly",
        insertMap.get(field).toString(),
        readResultMap.get(field).toString());
  }

  readResultMap = new HashMap<>();

  // Test reading all fields
  readFields.add("FIELD1");
  readFields.add("FIELD2");
  orientDBClient.read(CLASS, insertKey, readFields, readResultMap);
  assertEquals(
      "Assert that result has correct number of fields",
      readFields.size(),
      readResultMap.size());
  for (String field : readFields) {
    assertEquals(
        "Assert " + field + " was read correctly",
        insertMap.get(field).toString(),
        readResultMap.get(field).toString());
  }
}
@Test
public void testEPL() {
  // should say fieldsTypes, maybe with object/component prefix
  Map<String, Object> eventTypes = new HashMap<>();
  eventTypes.put(LITERAL_SYMBOL, String.class);
  eventTypes.put(LITERAL_PRICE, Integer.class);

  TopologyBuilder builder = new TopologyBuilder();
  builder.setSpout(LITERAL_QUOTES, new RandomSentenceSpout());
  builder
      .setBolt(
          LITERAL_ESPER,
          (new EsperBolt())
              .addEventTypes(eventTypes)
              .addOutputTypes(
                  Collections.singletonMap(
                      LITERAL_RETURN_OBJ, Arrays.asList(LITERAL_AVG, LITERAL_PRICE)))
              .addStatements(
                  Collections.singleton(
                      "insert into Result "
                          + "select avg(price) as avg, price from "
                          + "quotes_default(symbol='A').win:length(2) "
                          + "having avg(price) > 60.0")))
      .shuffleGrouping(LITERAL_QUOTES);
  builder.setBolt("print", new PrinterBolt()).shuffleGrouping(LITERAL_ESPER, LITERAL_RETURN_OBJ);

  Config conf = new Config();
  LocalCluster cluster = new LocalCluster();
  cluster.submitTopology("test", conf, builder.createTopology());
  Utils.sleep(10000);
  cluster.shutdown();

  assertEquals(resultEPL.get(100), new Double(75.0));
  assertEquals(resultEPL.get(50), new Double(75.0));
}
private void doControlTask() throws IOException, ClassNotFoundException {
  BlockingTaskSummaryResponseHandler handler = new BlockingTaskSummaryResponseHandler();
  client.getTasksAssignedAsPotentialOwner("control", "en-UK", handler);
  List<TaskSummary> sums = handler.getResults();
  assertNotNull(sums);
  assertEquals(1, sums.size());

  BlockingTaskOperationResponseHandler startTaskOperationHandler =
      new BlockingTaskOperationResponseHandler();
  client.start(sums.get(0).getId(), "control", startTaskOperationHandler);

  BlockingGetTaskResponseHandler getTaskHandler = new BlockingGetTaskResponseHandler();
  client.getTask(sums.get(0).getId(), getTaskHandler);
  Task controlTask = getTaskHandler.getTask();

  BlockingGetContentResponseHandler getContentHandler = new BlockingGetContentResponseHandler();
  client.getContent(controlTask.getTaskData().getDocumentContentId(), getContentHandler);
  Content content = getContentHandler.getContent();
  assertNotNull(content);

  ByteArrayInputStream bais = new ByteArrayInputStream(content.getContent());
  ObjectInputStream ois = new ObjectInputStream(bais);
  Map<String, Object> deserializedContent = (Map<String, Object>) ois.readObject();

  Emergency retrievedEmergency = (Emergency) deserializedContent.get("emergency");
  assertNotNull(retrievedEmergency);

  ActivePatients retrievedActivePatients =
      (ActivePatients) deserializedContent.get("activePatients");
  assertNotNull(retrievedActivePatients);
  assertEquals(1, retrievedActivePatients.size());

  SuggestedProcedures retrievedSuggestedProcedures =
      (SuggestedProcedures) deserializedContent.get("suggestedProcedures");
  assertNotNull(retrievedSuggestedProcedures);
  assertEquals(
      "[DefaultHeartAttackProcedure: ]",
      retrievedSuggestedProcedures.getSuggestedProceduresString());

  Map<String, Object> info = new HashMap<String, Object>();
  SelectedProcedures selectedProcedures = new SelectedProcedures(retrievedEmergency.getId());
  selectedProcedures.addSelectedProcedureName("DefaultHeartAttackProcedure");
  info.put("selectedProcedures", selectedProcedures);

  ContentData result = new ContentData();
  result.setAccessType(AccessType.Inline);
  result.setType("java.util.Map");
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  ObjectOutputStream out = new ObjectOutputStream(bos);
  out.writeObject(info);
  out.close();
  result.setContent(bos.toByteArray());

  BlockingTaskOperationResponseHandler completeTaskOperationHandler =
      new BlockingTaskOperationResponseHandler();
  client.complete(sums.get(0).getId(), "control", result, completeTaskOperationHandler);
}
@Test
public void removeFromMap() {
  HazelcastClient hClient = getHazelcastClient();
  Map map = hClient.getMap("removeFromMap");
  assertNull(map.put("a", "b"));
  assertEquals("b", map.get("a"));
  assertEquals("b", map.remove("a"));
  assertNull(map.remove("a"));
  assertNull(map.get("a"));
}
@Test
public void issue_38() {
  int max = 50000 * TT.scale();
  Map<Integer, String[]> map = DBMaker.memoryDB().transactionDisable().make().treeMap("test");
  for (int i = 0; i < max; i++) {
    map.put(i, new String[5]);
  }
  for (int i = 0; i < max; i = i + 1000) {
    assertTrue(Arrays.equals(new String[5], map.get(i)));
    assertTrue(map.get(i).toString().contains("[Ljava.lang.String"));
  }
}
@Test
public void testParameterToPairsWhenValueIsCollection() throws Exception {
  Map<String, String> collectionFormatMap = new HashMap<String, String>();
  collectionFormatMap.put("csv", ",");
  collectionFormatMap.put("tsv", "\t");
  collectionFormatMap.put("ssv", " ");
  collectionFormatMap.put("pipes", "\\|");
  collectionFormatMap.put("", ","); // no format, must default to csv
  collectionFormatMap.put("unknown", ","); // all other formats, must default to csv

  String name = "param-a";

  List<Object> values = new ArrayList<Object>();
  values.add("value-a");
  values.add(123);
  values.add(new Date());

  // check for multi separately
  List<Pair> multiPairs = apiClient.parameterToPairs("multi", name, values);
  assertEquals(values.size(), multiPairs.size());

  // all other formats
  for (String collectionFormat : collectionFormatMap.keySet()) {
    List<Pair> pairs = apiClient.parameterToPairs(collectionFormat, name, values);
    assertEquals(1, pairs.size());
    String delimiter = collectionFormatMap.get(collectionFormat);
    String[] pairValueSplit = pairs.get(0).getValue().split(delimiter);
    // must equal input values
    assertEquals(values.size(), pairValueSplit.length);
  }
}
/* http://stackoverflow.com/questions/13592236/parse-the-uri-string-into-name-value-collection-in-java */
public static Map<String, String> splitQuery(URL url) throws UnsupportedEncodingException {
  Map<String, String> queryPairs = new LinkedHashMap<String, String>();
  String query = url.getQuery();
  String[] pairs = query.split("&");
  for (String pair : pairs) {
    int idx = pair.indexOf("=");
    String key = URLDecoder.decode(pair.substring(0, idx), "UTF-8");
    if (queryPairs.get(key) == null) {
      queryPairs.put(key, URLDecoder.decode(pair.substring(idx + 1), "UTF-8"));
    } else {
      queryPairs.put(
          key, queryPairs.get(key) + ", " + URLDecoder.decode(pair.substring(idx + 1), "UTF-8"));
    }
  }
  return queryPairs;
}
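/*
 * A minimal usage sketch for splitQuery, not part of the original suite: the URL, test name,
 * and expected values below are illustrative assumptions. It shows the two behaviors the
 * method implements: values are URL-decoded, and duplicate keys are joined with ", ".
 */
@Test
public void splitQueryUsageSketch() throws Exception {
  URL url = new URL("http://example.com/search?q=a%20b&lang=en&lang=de");
  Map<String, String> params = splitQuery(url);
  assertEquals("a b", params.get("q")); // %20 decoded to a space
  assertEquals("en, de", params.get("lang")); // repeated key concatenated
}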
@Test
public void shouldFetchAllIngredients() {
  Iterable<Ingredient> ingredients = flavorController.getIngredients();
  assertNotNull(ingredients);
  Map<String, Ingredient> ing = new HashMap<>();
  for (Ingredient ingredient : ingredients) {
    ing.put(ingredient.getName(), ingredient);
  }
  assertEquals(5, ing.size());
  assertEquals(ingredientRepository.findByName("Chicken").iterator().next(), ing.get("Chicken"));
  assertEquals(ingredientRepository.findByName("Carrot").iterator().next(), ing.get("Carrot"));
  assertEquals(ingredientRepository.findByName("Butter").iterator().next(), ing.get("Butter"));
  assertEquals(
      ingredientRepository.findByName("Coriander").iterator().next(), ing.get("Coriander"));
  assertEquals(ingredientRepository.findByName("Yoghurt").iterator().next(), ing.get("Yoghurt"));
}
// FIXME: fails! needs MarcCombiningReader for mhld or at least a diff version of RawRecordReader
@Test
public void testMultMHLDsWithSameID() throws IOException {
  // bib134, multMhlds1
  String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs134.mrc";
  String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds1Mult.mrc";
  Map<String, Record> mergedRecs =
      MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibFilePath, mhldFilePath);
  Record mergedRec = mergedRecs.get("a1");

  assertEquals("Expected three 852", 3, mergedRec.getVariableFields("852").size());
  Set<String> expectedVals = new HashSet<String>();
  expectedVals.add("Location1");
  expectedVals.add("Location2");
  RecordTestingUtils.assertSubfieldHasExpectedValues(mergedRec, "852", 'b', expectedVals);

  expectedVals.clear();
  expectedVals.add("(month)");
  expectedVals.add("(season)");
  RecordTestingUtils.assertSubfieldHasExpectedValues(mergedRec, "853", 'b', expectedVals);

  assertEquals("Expected two 863", 2, mergedRec.getVariableFields("863").size());
  assertEquals("Expected one 866", 1, mergedRec.getVariableFields("866").size());

  // fail("Implement me");
  System.out.println("Test testMultMHLDsWithSameID() successful");
}
@Test
@org.junit.Ignore
public void large_node_size() {
  for (int i : new int[] {10, 200, 6000}) {
    int max = i * 100;
    File f = TT.tempDbFile();
    DB db = DBMaker.fileDB(f).transactionDisable().make();
    Map m =
        db.treeMapCreate("map")
            .nodeSize(i)
            .keySerializer(BTreeKeySerializer.INTEGER)
            .valueSerializer(Serializer.INTEGER)
            .make();
    for (int j = 0; j < max; j++) {
      m.put(j, j);
    }
    db.close();

    db = DBMaker.fileDB(f).deleteFilesAfterClose().transactionDisable().make();
    m = db.treeMap("map");
    for (Integer j = 0; j < max; j++) {
      assertEquals(j, m.get(j));
    }
    db.close();
  }
}
@Test
public void testPutGet() {
  Map<Integer, Integer> myMap = new MyMap<>();
  Map<Integer, Integer> control = new HashMap<>();
  for (int i = 0; i < N; i++) {
    int k = random.nextInt();
    int v = random.nextInt();
    myMap.put(k, v);
    control.put(k, v);
  }
  for (int k : control.keySet()) {
    assertEquals(myMap.get(k), control.get(k));
    int r = random.nextInt();
    assertEquals(myMap.get(r), control.get(r));
  }
}
@Test
public void putToTheMap() throws InterruptedException {
  HazelcastClient hClient = getHazelcastClient();
  Map<String, String> clientMap = hClient.getMap("putToTheMap");
  assertEquals(0, clientMap.size());

  String result = clientMap.put("1", "CBDEF");
  assertNull(result);
  assertEquals("CBDEF", clientMap.get("1"));
  assertEquals("CBDEF", clientMap.get("1"));
  assertEquals("CBDEF", clientMap.get("1"));
  assertEquals(1, clientMap.size());

  result = clientMap.put("1", "B");
  assertEquals("CBDEF", result);
  assertEquals("B", clientMap.get("1"));
  assertEquals("B", clientMap.get("1"));
}
/** code should output the unchanged bib records if no mhlds match */
@Test
public void testNoMatches() throws IOException {
  // bib46, mhld235
  String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs46.mrc";
  String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds235.mrc";
  Map<String, Record> mergedRecs =
      MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibFilePath, mhldFilePath);
  Set<String> mergedRecIds = mergedRecs.keySet();
  assertEquals(2, mergedRecIds.size());

  // result bibs should match the bib input because there was no merge
  String id = "a4";
  RecordTestingUtils.assertEquals(ALL_UNMERGED_BIBS.get(id), mergedRecs.get(id));
  id = "a6";
  RecordTestingUtils.assertEquals(ALL_UNMERGED_BIBS.get(id), mergedRecs.get(id));

  System.out.println("Test testNoMatches() successful");
}
private MapMessage newMapMessage(Map<String, Object> body) throws JMSException {
  MapMessage message = new MapMessageImpl();
  for (String key : body.keySet()) {
    Object value = body.get(key);
    message.setObject(key, value);
  }
  return message;
}
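/*
 * A minimal usage sketch for newMapMessage, not part of the original suite: the body keys and
 * values below are illustrative assumptions. It assumes MapMessageImpl follows the standard JMS
 * MapMessage conversion rules, so an Integer stored via setObject can be read back with getInt.
 */
@Test
public void newMapMessageUsageSketch() throws JMSException {
  Map<String, Object> body = new HashMap<String, Object>();
  body.put("id", 42);
  body.put("name", "sample");
  MapMessage message = newMapMessage(body);
  assertEquals(42, message.getInt("id"));
  assertEquals("sample", message.getString("name"));
}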
@Test
public void getPuttedValueFromTheMap() {
  HazelcastClient hClient = getHazelcastClient();
  Map<String, String> clientMap = hClient.getMap("getPuttedValueFromTheMap");
  int size = clientMap.size();
  clientMap.put("1", "Z");
  String value = clientMap.get("1");
  assertEquals("Z", value);
  assertEquals(size + 1, clientMap.size());
}
@Test
public void get() throws InterruptedException {
  HazelcastClient hClient = getHazelcastClient();
  MultiMap<String, Integer> multiMap = hClient.getMultiMap("get");
  assertTrue(multiMap.put("a", 1));
  assertTrue(multiMap.put("a", 2));

  Map<Integer, CountDownLatch> map = new HashMap<Integer, CountDownLatch>();
  map.put(1, new CountDownLatch(1));
  map.put(2, new CountDownLatch(1));

  Collection<Integer> collection = multiMap.get("a");
  assertEquals(Values.class, collection.getClass());
  assertEquals(2, collection.size());
  for (Iterator<Integer> it = collection.iterator(); it.hasNext(); ) {
    Integer o = it.next();
    map.get(o).countDown();
  }
  assertTrue(map.get(1).await(10, TimeUnit.SECONDS));
  assertTrue(map.get(2).await(10, TimeUnit.SECONDS));
}
/** code should find a match when non-last bib matches last mhld */
@Test
public void testNonLastBibMatchesLastMhld() throws IOException {
  // bib46, mhld34
  String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs46.mrc";
  String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds34.mrc";
  Map<String, Record> mergedRecs =
      MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibFilePath, mhldFilePath);

  // there should be 2 results
  Set<String> mergedRecIds = mergedRecs.keySet();
  assertEquals(2, mergedRecIds.size());

  // only result bib a4 should have the mhld fields; a6 should be unchanged
  String id = "a4";
  RecordTestingUtils.assertEqualsIgnoreLeader(ALL_MERGED_BIB_RESULTS.get(id), mergedRecs.get(id));
  id = "a6";
  RecordTestingUtils.assertEqualsIgnoreLeader(ALL_UNMERGED_BIBS.get(id), mergedRecs.get(id));

  System.out.println("Test testNonLastBibMatchesLastMhld() successful");
}
private void doOperatorTask() throws ClassNotFoundException, IOException {
  BlockingTaskSummaryResponseHandler handler = new BlockingTaskSummaryResponseHandler();
  client.getTasksAssignedAsPotentialOwner("operator", "en-UK", handler);
  List<TaskSummary> sums = handler.getResults();
  assertNotNull(sums);
  assertEquals(1, sums.size());

  BlockingTaskOperationResponseHandler startTaskOperationHandler =
      new BlockingTaskOperationResponseHandler();
  client.start(sums.get(0).getId(), "operator", startTaskOperationHandler);

  BlockingGetTaskResponseHandler getTaskHandler = new BlockingGetTaskResponseHandler();
  client.getTask(sums.get(0).getId(), getTaskHandler);
  Task operatorTask = getTaskHandler.getTask();

  BlockingGetContentResponseHandler getContentHandler = new BlockingGetContentResponseHandler();
  client.getContent(operatorTask.getTaskData().getDocumentContentId(), getContentHandler);
  Content content = getContentHandler.getContent();
  assertNotNull(content);

  ByteArrayInputStream bais = new ByteArrayInputStream(content.getContent());
  ObjectInputStream ois = new ObjectInputStream(bais);
  Map<String, Object> deserializedContent = (Map<String, Object>) ois.readObject();
  Call restoredCall = (Call) deserializedContent.get("call");
  persistenceService.storeCall(restoredCall);

  Emergency emergency = new Emergency();
  emergency.setCall(restoredCall);
  emergency.setLocation(new Location(1, 2));
  emergency.setType(Emergency.EmergencyType.HEART_ATTACK);
  emergency.setNroOfPeople(1);
  persistenceService.storeEmergency(emergency);
  trackingService.attachEmergency(restoredCall.getId(), emergency.getId());

  Map<String, Object> info = new HashMap<String, Object>();
  info.put("emergency", emergency);

  ContentData result = new ContentData();
  result.setAccessType(AccessType.Inline);
  result.setType("java.util.Map");
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  ObjectOutputStream out = new ObjectOutputStream(bos);
  out.writeObject(info);
  out.close();
  result.setContent(bos.toByteArray());

  BlockingTaskOperationResponseHandler completeTaskOperationHandler =
      new BlockingTaskOperationResponseHandler();
  client.complete(sums.get(0).getId(), "operator", result, completeTaskOperationHandler);
}
@Test
public void in_memory_test() {
  StorageDirect engine = new StorageDirect(Volume.memoryFactory(false));
  Map<Long, Integer> recids = new HashMap<Long, Integer>();
  for (int i = 0; i < 1000; i++) {
    long recid = engine.recordPut(i, Serializer.BASIC_SERIALIZER);
    recids.put(recid, i);
  }
  for (Long recid : recids.keySet()) {
    assertEquals(recids.get(recid), engine.recordGet(recid, Serializer.BASIC_SERIALIZER));
  }
}
@Test
public void dataset() {
  Document doc =
      Jsoup.parse(
          "<div id=1 data-name=jsoup class=new data-package=jar>Hello</div><p id=2>Hello</p>");
  Element div = doc.select("div").first();
  Map<String, String> dataset = div.dataset();
  Attributes attributes = div.attributes();

  // size, get, set, add, remove
  assertEquals(2, dataset.size());
  assertEquals("jsoup", dataset.get("name"));
  assertEquals("jar", dataset.get("package"));

  dataset.put("name", "jsoup updated");
  dataset.put("language", "java");
  dataset.remove("package");
  assertEquals(2, dataset.size());
  assertEquals(4, attributes.size());
  assertEquals("jsoup updated", attributes.get("data-name"));
  assertEquals("jsoup updated", dataset.get("name"));
  assertEquals("java", attributes.get("data-language"));
  assertEquals("java", dataset.get("language"));

  attributes.put("data-food", "bacon");
  assertEquals(3, dataset.size());
  assertEquals("bacon", dataset.get("food"));

  attributes.put("data-", "empty");
  assertEquals(null, dataset.get("")); // data- is not a data attribute

  Element p = doc.select("p").first();
  assertEquals(0, p.dataset().size());
}
@Test
public void testSODA() {
  // should say fieldsTypes, maybe with object/component prefix
  Map<String, Object> eventTypes = new HashMap<>();
  eventTypes.put(LITERAL_SYMBOL, String.class);
  eventTypes.put(LITERAL_PRICE, Integer.class);

  EPStatementObjectModel model = new EPStatementObjectModel();
  model.setInsertInto(InsertIntoClause.create(LITERAL_RETURN_OBJ));
  model.setSelectClause(
      SelectClause.create().add(Expressions.avg(LITERAL_PRICE), LITERAL_AVG).add(LITERAL_PRICE));
  Filter filter = Filter.create("quotes_default", Expressions.eq(LITERAL_SYMBOL, "A"));
  model.setFromClause(
      FromClause.create(
          FilterStream.create(filter).addView("win", "length", Expressions.constant(2))));
  model.setHavingClause(
      Expressions.gt(Expressions.avg(LITERAL_PRICE), Expressions.constant(60.0)));

  TopologyBuilder builder = new TopologyBuilder();
  builder.setSpout(LITERAL_QUOTES, new RandomSentenceSpout());
  builder
      .setBolt(
          LITERAL_ESPER,
          (new EsperBolt())
              .addEventTypes(eventTypes)
              .addOutputTypes(
                  Collections.singletonMap(
                      LITERAL_RETURN_OBJ, Arrays.asList(LITERAL_AVG, LITERAL_PRICE)))
              .addObjectStatemens(Collections.singleton(model)))
      .shuffleGrouping(LITERAL_QUOTES);
  builder.setBolt("print", new PrinterBolt()).shuffleGrouping(LITERAL_ESPER, LITERAL_RETURN_OBJ);

  Config conf = new Config();
  LocalCluster cluster = new LocalCluster();
  cluster.submitTopology("test", conf, builder.createTopology());
  Utils.sleep(10000);
  cluster.shutdown();

  assertEquals(resultSODA.get(100), new Double(75.0));
  assertEquals(resultSODA.get(50), new Double(75.0));
}
@Test
public void testLong() {
  DB db = DBMaker.memoryDB().make();
  Map m = db.treeMap("test").keySerializer(Serializer.LONG).createOrOpen();
  for (long i = 0; i < 1000; i++) {
    m.put(i * i, i * i + 1);
  }
  for (long i = 0; i < 1000; i++) {
    assertEquals(i * i + 1, m.get(i * i));
  }
}
@Test
public void testGetAuthentications() {
  Map<String, Authentication> auths = apiClient.getAuthentications();

  Authentication auth = auths.get("api_key");
  assertNotNull(auth);
  assertTrue(auth instanceof ApiKeyAuth);
  ApiKeyAuth apiKeyAuth = (ApiKeyAuth) auth;
  assertEquals("header", apiKeyAuth.getLocation());
  assertEquals("api_key", apiKeyAuth.getParamName());

  auth = auths.get("petstore_auth");
  assertTrue(auth instanceof OAuth);
  assertSame(auth, apiClient.getAuthentication("petstore_auth"));

  assertNull(auths.get("unknown"));

  try {
    auths.put("my_auth", new HttpBasicAuth());
    fail("the authentications returned should not be modifiable");
  } catch (UnsupportedOperationException e) {
    // expected: the returned map must be unmodifiable
  }
}
@Test
public void getDistinctKeysAndCounts() {
  Connection connection = null;
  ResultSet resultSet = null;
  try {
    ConnectionManager connectionManager = temporaryFileDatabase.getConnectionManager(true);
    initWithTestData(connectionManager);

    connection = connectionManager.getConnection(null);
    resultSet = DBQueries.getDistinctKeysAndCounts(false, NAME, connection);

    Map<String, Integer> resultSetToMap = resultSetToMap(resultSet);
    assertEquals(3, resultSetToMap.size());
    assertEquals(new Integer(2), resultSetToMap.get("abs"));
    assertEquals(new Integer(1), resultSetToMap.get("airbags"));
    assertEquals(new Integer(1), resultSetToMap.get("gps"));
  } finally {
    DBUtils.closeQuietly(resultSet);
    DBUtils.closeQuietly(connection);
  }
}
/**
 * This method adds all of the necessary information for the lab variables to the expected map so
 * the values can be compared later.
 *
 * @param expected [out] to populate expected dynamic values
 * @param vars [out] a list of AbstractVariables needed for the calculation
 * @param patient the patient for the current calculation
 */
private void addAllLabs(
    final VariableEntry expected, final List<AbstractVariable> vars, final Patient patient) {
  for (final String key : LAB_MAP.keySet()) {
    // Warning: semantic coupling. Simulate what VariableEntry would do if we
    // constructed it using the variable list.
    expected.putDynamicValue(key, VariableEntry.SPECIAL_NUMERICAL);
    final ValueRetriever retriever = LAB_MAP.get(key);
    vars.add(makeDiscreteNumerical(key, retriever));
    final RetrievedValue labValue = patient.getLabs().get(VistaLabs.valueOf(retriever.name()));
    final String inputKey = VariableEntry.makeNumericalInputName(key);
    expected.putRetrievedValue(inputKey, labValue);
  }
}
/** test methods that return Map of ids to Records and no sysout stuff */
@Test
public void testGettingOutputAsMapOfRecords() throws IOException {
  String mhldRecFileName = testDataParentPath + File.separator + "summaryHld_1-1000.mrc";
  String bibRecFileName = testDataParentPath + File.separator + "u335.mrc";
  Map<String, Record> mergedRecs =
      MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibRecFileName, mhldRecFileName);

  junit.framework.Assert.assertEquals("results should have 1 record", 1, mergedRecs.size());
  String expId = "u335";
  assertTrue("Record with id " + expId + " should be in results", mergedRecs.containsKey(expId));

  Record resultRec = mergedRecs.get(expId);
  RecordTestingUtils.assertEqualsIgnoreLeader(mergedSummaryHoldingsOutputNoUmlaut, resultRec);
  System.out.println("Test testGettingOutputAsMapOfRecords() successful");
}
@Test
public void testString() {
  DB db = DBMaker.memoryDB().make();
  Map m = db.treeMap("test").keySerializer(Serializer.STRING).createOrOpen();
  List<String> list = new ArrayList<String>();
  for (long i = 0; i < 1000; i++) {
    String s = "" + Math.random() + (i * i * i);
    list.add(s); // record the key so the verification loop below actually checks it
    m.put(s, s + "aa");
  }
  for (String s : list) {
    assertEquals(s + "aa", m.get(s));
  }
}
@Test
public void insertTest() {
  String insertKey = "user0";
  Map<String, ByteIterator> insertMap = insertRow(insertKey);

  ODocument result = orientDBDictionary.get(insertKey);
  assertTrue("Assert a row was inserted.", result != null);
  for (int i = 0; i < NUM_FIELDS; i++) {
    assertEquals(
        "Assert all inserted columns have correct values.",
        result.field(FIELD_PREFIX + i),
        insertMap.get(FIELD_PREFIX + i).toString());
  }
}
@Test
public void testRMSPropUpdater() {
  double lr = 0.01;
  double rmsDecay = 0.25;
  Map<String, INDArray> lastG = new HashMap<>();

  NeuralNetConfiguration conf =
      new NeuralNetConfiguration.Builder()
          .learningRate(lr)
          .rmsDecay(rmsDecay)
          .layer(
              new DenseLayer.Builder()
                  .nIn(nIn)
                  .nOut(nOut)
                  .updater(org.deeplearning4j.nn.conf.Updater.RMSPROP)
                  .build())
          .build();

  int numParams = LayerFactories.getFactory(conf).initializer().numParams(conf, true);
  INDArray params = Nd4j.create(1, numParams);
  Layer layer = LayerFactories.getFactory(conf).create(conf, null, 0, params, true);
  Updater updater = UpdaterCreator.getUpdater(layer);
  int updaterStateSize = updater.stateSizeForLayer(layer);
  INDArray updaterState = Nd4j.create(1, updaterStateSize);
  updater.setStateViewArray(layer, updaterState, true);

  updater.update(layer, gradient, -1, 1);

  Gradient gradientDup = new DefaultGradient();
  gradientDup.setGradientFor(DefaultParamInitializer.WEIGHT_KEY, weightGradient.dup());
  gradientDup.setGradientFor(DefaultParamInitializer.BIAS_KEY, biasGradient.dup());

  for (Map.Entry<String, INDArray> entry : gradientDup.gradientForVariable().entrySet()) {
    key = entry.getKey();
    val = entry.getValue();
    INDArray lastGTmp = lastG.get(key);
    if (lastGTmp == null) lastGTmp = Nd4j.zeros(val.shape());
    // RMSProp: cache = decay * cache + (1 - decay) * g^2; expected update = lr * g / sqrt(cache + eps)
    lastGTmp.muli(rmsDecay).addi(val.mul(val).muli(1 - rmsDecay));
    gradExpected = val.mul(lr).div(Transforms.sqrt(lastGTmp.add(Nd4j.EPS_THRESHOLD)));
    assertEquals(gradExpected, gradient.getGradientFor(entry.getKey()));
    lastG.put(key, lastGTmp);
  }
  assertEquals(rmsDecay, layer.conf().getLayer().getRmsDecay(), 1e-4);
}