public void testHeadNotPopular() throws Exception {
  VersionCounts versionCounts = VersionCounts.make();

  VoteBlock vb1 = makeVoteBlock("http://test.com/foo1");
  byte[] hash1 = addVersion(vb1, "content 1 for foo1");
  byte[] hash2 = addVersion(vb1, "content 2 for foo1");

  VoteBlock vb2 = makeVoteBlock("http://test.com/foo1");
  addVersion(vb2, "content 1 for foo1");
  addVersion(vb2, "content 2 for foo1");

  VoteBlock vb3 = makeVoteBlock("http://test.com/foo1");
  addVersion(vb3, "content 3 for foo1");
  addVersion(vb3, "content 2 for foo1");

  versionCounts.vote(vb1, participant1);
  versionCounts.vote(vb2, participant2);
  versionCounts.vote(vb3, participant3);

  Map<ParticipantUserData, HashResult> repairCandidates;
  repairCandidates = versionCounts.getRepairCandidates(0);
  assertSameElements(SetUtil.set(participant1, participant2, participant3),
                     repairCandidates.keySet());

  repairCandidates = versionCounts.getRepairCandidates(1);
  assertSameElements(SetUtil.set(participant1, participant2, participant3),
                     repairCandidates.keySet());

  repairCandidates = versionCounts.getRepairCandidates(2);
  assertSameElements(SetUtil.set(participant1, participant2),
                     repairCandidates.keySet());

  repairCandidates = versionCounts.getRepairCandidates(3);
  assertEmpty(repairCandidates.keySet());
}
public void testMultipleIdenticalVersions() throws Exception {
  VersionCounts versionCounts = VersionCounts.make();

  VoteBlock vb1 = makeVoteBlock("http://test.com/foo1");
  byte[] hash1 = addVersion(vb1, "content 1 for foo1");
  byte[] hash2 = addVersion(vb1, "content 2 for foo1");

  VoteBlock vb2 = makeVoteBlock("http://test.com/foo1");
  addVersion(vb2, "content 1 for foo1");
  addVersion(vb2, "content 1 for foo1");
  addVersion(vb2, "content 1 for foo1");
  addVersion(vb2, "content 1 for foo1");
  addVersion(vb2, "content 2 for foo1");

  VoteBlock vb3 = makeVoteBlock("http://test.com/foo1");
  addVersion(vb3, "content 1 for foo1");
  addVersion(vb3, "content 2 for foo1");
  addVersion(vb3, "content 2 for foo1");
  addVersion(vb3, "content 2 for foo1");
  addVersion(vb3, "content 2 for foo1");

  versionCounts.vote(vb1, participant1);
  versionCounts.vote(vb2, participant2);
  versionCounts.vote(vb3, participant3);

  Map<ParticipantUserData, HashResult> repairCandidates;
  repairCandidates = versionCounts.getRepairCandidates(2);
  assertSameElements(SetUtil.set(participant1, participant2, participant3),
                     repairCandidates.keySet());

  // With only three candidates, no version should reach a threshold
  // of 4, unless counting multiples is wrong.
  repairCandidates = versionCounts.getRepairCandidates(4);
  assertEmpty(repairCandidates.keySet());
}
// Break the line at commas, return a map of the resulting strings
// broken at the equals sign (i.e., name-value pairs).
Map getRow(String line) {
  Map map = new HashMap();
  for (Iterator iter = StringUtil.breakAt(line, ',').iterator(); iter.hasNext(); ) {
    String item = (String) iter.next();
    List pair = StringUtil.breakAt(item, '=');
    map.put(pair.get(0), pair.get(1));
  }
  return map;
}
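// Worked example (hypothetical input, matching the row values checked in
// testText() below): getRow("host=2.4.6.8,table=testtbl") returns a map of
// {host=2.4.6.8, table=testtbl}. Each comma-separated item is assumed to
// contain an '=' separating its name from its value.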
/*
 * When testing no-pdf-check basic XML parsing, you will get partial MD records,
 * depending on whether the info comes from dataset.xml or from main.xml.
 */
private void validateDatasetMetadataRecord(ArticleMetadata am) {
  log.debug3("validateDatasetMetadataRecord");
  String doi_val = am.get(MetadataField.FIELD_DOI);
  assertEquals(common_issn, am.get(MetadataField.FIELD_ISSN));

  log.debug3("doi val is: " + doi_val);
  // The dataset doesn't set this value; it falls back to the main.xml value
  if (doi_val.equals("10.1016/S0140-1111(14)61865-1")) {
    assertEquals(null, am.get(MetadataField.FIELD_DATE));
  } else {
    assertEquals(dateMap.get(doi_val), am.get(MetadataField.FIELD_DATE));
  }
  assertEquals(pubTitleMap.get(doi_val), am.get(MetadataField.FIELD_PUBLICATION_TITLE));
}
/*
 * You will have to tell it the DOI and the schema because those normally come
 * from dataset.xml.
 */
private void validateSingleMainMetadataRecord(ArticleMetadata am, String doi_val, String schema) {
  log.debug3("validateSingleMainMetadataRecord");

  if ("simple-article".equals(schema)) {
    assertEquals(common_simple_article_title, am.get(MetadataField.FIELD_ARTICLE_TITLE));
  } else {
    assertEquals(common_article_title, am.get(MetadataField.FIELD_ARTICLE_TITLE));
  }

  log.debug3("doi val is: " + doi_val);
  assertEquals(authorMap.get(doi_val), am.getList(MetadataField.FIELD_AUTHOR));
  assertEquals(volMap.get(doi_val), am.get(MetadataField.FIELD_VOLUME));
  assertEquals(issueMap.get(doi_val), am.get(MetadataField.FIELD_ISSUE));
  assertEquals("Comment", am.getRaw(ElsevierMainDTD5XmlSchemaHelper.common_dochead));
  assertEquals(doi_val, am.getRaw(ElsevierMainDTD5XmlSchemaHelper.common_doi));
  assertEquals("2014", am.getRaw(ElsevierMainDTD5XmlSchemaHelper.common_copyright));
}
public void setRepoMap(Map<String, PlatformUtil.DF> repoMap) {
  List<String> repos = new ArrayList<String>();
  this.repoMap = repoMap;
  for (String repo : repoMap.keySet()) {
    repos.add(repo);
  }
  setRepos(repos);
}
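// Note: this setter and getRepositoryDF() below form a simple stub pair, assuming
// the surrounding mock overrides the real disk-space lookup. The supplied map is
// kept for later lookups, and its key set (the repository specs) is also handed
// to setRepos() so the repository list stays consistent with the map.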
public void testFromArgs() {
  Map m1 = MapUtil.map("a", "1", "b", "2", "c", "3", "d", "4", "e", "5",
                       "f", "6", "g", "7", "h", "8", "j", "9", "k", "10");
  Map exp = new HashMap();
  exp.put("a", "1");
  exp.put("b", "2");
  exp.put("c", "3");
  exp.put("d", "4");
  exp.put("e", "5");
  exp.put("f", "6");
  exp.put("g", "7");
  exp.put("h", "8");
  exp.put("j", "9");
  exp.put("k", "10");
  assertEquals(exp, m1);

  try {
    MapUtil.map("a", "1", "b", "2", "c", "3", "d", "4", "e", "5",
                "f", "6", "g", "7", "h", "8", "j", "9", "k");
    fail("Odd length arg list should throw");
  } catch (IllegalArgumentException e) {
  }
}
public void testText() throws Exception {
  MockStatusAccessor statusAccessor =
      MockStatusAccessor.generateStatusAccessor(colArray1, rowArray1);
  statusAccessor.setTitle("testtbl", null);
  statSvc.registerStatusAccessor("testtbl", statusAccessor);

  WebResponse resp = getTable("testtbl", true);
  assertResponseOk(resp);
  assertEquals("Content type", "text/plain", resp.getContentType());
  log.debug(resp.getText());

  List lines = getLines(resp);
  assertEquals(rowArray1.length + 3, lines.size());
  Map row0 = getRow((String) lines.get(0));
  assertEquals("2.4.6.8", row0.get("host"));
  Map row2 = getRow((String) lines.get(2));
  assertEquals("testtbl", row2.get("table"));
  assertEqualTables(table1, lines);
}
public void testHeadNotAllowed() throws Exception {
  VersionCounts versionCounts = VersionCounts.make();

  VoteBlock vb1 = makeVoteBlock("http://test.com/foo1");
  byte[] hash1 = addVersion(vb1, "content 1 for foo1");
  byte[] hash2 = addVersion(vb1, "content 2 for foo1");

  versionCounts.vote(vb1, participant1);

  Map<ParticipantUserData, HashResult> repairCandidates;
  repairCandidates = versionCounts.getRepairCandidates(0);
  assertSameElements(SetUtil.set(participant1), repairCandidates.keySet());

  // Same, but with an excluded version that doesn't matter.
  repairCandidates =
      versionCounts.getRepairCandidates(0, SetUtil.set(HashResult.make(hash2)));
  assertSameElements(SetUtil.set(participant1), repairCandidates.keySet());

  // Same, but with an excluded version that does matter.
  repairCandidates =
      versionCounts.getRepairCandidates(0, SetUtil.set(HashResult.make(hash1)));
  assertEmpty(repairCandidates);
}
/*
 * When testing a complete extraction of the tar set, the MD record will be
 * completely filled in and PDF existence will be established.
 */
private void validateCompleteMetadataRecord(ArticleMetadata am) {
  log.debug3("validateCompleteMetadataRecord");
  String doi_val = am.get(MetadataField.FIELD_DOI);
  /* make sure we can pick up both types of xml article data */
  log.debug3("doi val is: " + doi_val);

  if ("JA 5.2.0 SIMPLE-ARTICLE"
      .equals(am.getRaw(ElsevierDatasetXmlSchemaHelper.dataset_dtd_metadata))) {
    log.debug3("simple-article");
    assertEquals(common_simple_article_title, am.get(MetadataField.FIELD_ARTICLE_TITLE));
  } else {
    assertEquals(common_article_title, am.get(MetadataField.FIELD_ARTICLE_TITLE));
  }
  assertEquals(common_issn, am.get(MetadataField.FIELD_ISSN));
  assertEquals(authorMap.get(doi_val), am.getList(MetadataField.FIELD_AUTHOR));
  assertEquals(dateMap.get(doi_val), am.get(MetadataField.FIELD_DATE));
  assertEquals(accessUrlMap.get(doi_val), am.get(MetadataField.FIELD_ACCESS_URL));
  assertEquals(volMap.get(doi_val), am.get(MetadataField.FIELD_VOLUME));
  assertEquals(issueMap.get(doi_val), am.get(MetadataField.FIELD_ISSUE));
  assertEquals(pubTitleMap.get(doi_val), am.get(MetadataField.FIELD_PUBLICATION_TITLE));
  assertEquals("Elsevier", am.get(MetadataField.FIELD_PROVIDER));
  assertEquals("Elsevier", am.get(MetadataField.FIELD_PUBLISHER));
  log.debug3(am.ppString(2));
}
public PlatformUtil.DF getRepositoryDF(String repoName) {
  if (repoMap != null) {
    return (PlatformUtil.DF) repoMap.get(repoName);
  }
  return super.getRepositoryDF(repoName);
}
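// A minimal usage sketch (hypothetical repo spec "local:/tmp/repo1" and DF value):
//   Map<String, PlatformUtil.DF> map = new HashMap<String, PlatformUtil.DF>();
//   map.put("local:/tmp/repo1", someDf);
//   setRepoMap(map);
//   assertSame(someDf, getRepositoryDF("local:/tmp/repo1"));
// If setRepoMap() was never called, repoMap is null and the call falls through
// to the superclass implementation.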
/*
 * The supporting methods
 */
private void setUpExpectedTarContent() {
  /* maps the DOIs in the metadata to the expected values */
  log.debug3("setUpExpectedTarContent");

  pubTitleMap = new HashMap<String, String>();
  pubTitleMap.put("10.1016/j.jidx.2014.07.028", "International Journal of XXX");
  pubTitleMap.put("10.1016/j.jidx2.2014.05.013", "Revista");
  pubTitleMap.put("10.1016/S1473-1111(14)70840-0", "The Journal");
  pubTitleMap.put("10.1016/S0140-1111(14)61865-1", "The Other Journal");
  pubTitleMap.put("10.1016/j.foo.2014.08.001", "Foo");
  pubTitleMap.put("10.1016/j.foo.2014.08.123", "Foo");

  dateMap = new HashMap<String, String>();
  dateMap.put("10.1016/j.jidx.2014.07.028", "2014-07-30");
  dateMap.put("10.1016/j.jidx2.2014.05.013", "2014-07-09");
  dateMap.put("10.1016/S1473-1111(14)70840-0", "2014-09-01");
  dateMap.put("10.1016/S0140-1111(14)61865-1", "2014"); // will get from main.xml as backup
  dateMap.put("10.1016/j.foo.2014.08.001", "2014-08-20");
  dateMap.put("10.1016/j.foo.2014.08.123", "2014-08-20");

  accessUrlMap = new HashMap<String, String>();
  accessUrlMap.put("10.1016/j.jidx.2014.07.028",
      TAR_A_BASE + SUBDIR + "01420615/v64sC/S0142061514004608/main.pdf");
  accessUrlMap.put("10.1016/j.jidx2.2014.05.013",
      TAR_A_BASE + SUBDIR + "00349356/v61i9/S0034935614001819/main.pdf");
  accessUrlMap.put("10.1016/S1473-1111(14)70840-0",
      TAR_A_BASE + SUBDIR + "14733099/v14i10/S1473309914708400/main.pdf");
  accessUrlMap.put("10.1016/S0140-1111(14)61865-1",
      TAR_B_BASE + SUBDIR + "01406736/v384sS1/S0140673614618651/main.pdf");
  accessUrlMap.put("10.1016/j.foo.2014.08.001",
      TAR_B_BASE + SUBDIR + "00191035/v242sC/S0019103514004151/main.pdf");
  accessUrlMap.put("10.1016/j.foo.2014.08.123",
      TAR_B_BASE + SUBDIR + "00191035/v242sC/S0019103514003856/main.pdf");

  ArrayList<String> goodAuthors = new ArrayList<String>();
  goodAuthors.add("Writer, Bob");
  goodAuthors.add("Q. Text, Samantha");
  ArrayList<String> simpleAuthors = new ArrayList<String>();
  simpleAuthors.add("Simple, Josh");
  ArrayList<String> extendedAuthors = new ArrayList<String>();
  extendedAuthors.add("Writer, Bob");
  extendedAuthors.add("Q. Text, Samantha");
  extendedAuthors.add("The COLLABORATIVE Investigators");

  authorMap = new HashMap<String, List<String>>();
  authorMap.put("10.1016/j.jidx.2014.07.028", goodAuthors);
  authorMap.put("10.1016/j.jidx2.2014.05.013", goodAuthors);
  authorMap.put("10.1016/S1473-1111(14)70840-0", extendedAuthors);
  authorMap.put("10.1016/S0140-1111(14)61865-1", simpleAuthors);
  authorMap.put("10.1016/j.foo.2014.08.001", goodAuthors);
  authorMap.put("10.1016/j.foo.2014.08.123", goodAuthors);

  volMap = new HashMap<String, String>();
  volMap.put("10.1016/j.jidx.2014.07.028", "64");
  volMap.put("10.1016/j.jidx2.2014.05.013", "61");
  volMap.put("10.1016/S1473-1111(14)70840-0", "14");
  volMap.put("10.1016/S0140-1111(14)61865-1", "384");
  volMap.put("10.1016/j.foo.2014.08.001", "242");
  volMap.put("10.1016/j.foo.2014.08.123", "242");

  issueMap = new HashMap<String, String>();
  issueMap.put("10.1016/j.jidx.2014.07.028", "C");
  issueMap.put("10.1016/j.jidx2.2014.05.013", "9");
  issueMap.put("10.1016/S1473-1111(14)70840-0", "10");
  issueMap.put("10.1016/S0140-1111(14)61865-1", "S1");
  issueMap.put("10.1016/j.foo.2014.08.001", "C");
  issueMap.put("10.1016/j.foo.2014.08.123", "C");
}