public static void main(String[] args) throws Exception {
  new EarlReport()
      .generateReport(
          new TriGParserTest().createTestSuite(),
          EarlReport.ANSELL,
          ValueFactoryImpl.getInstance().createURI("http://www.w3.org/TR/trig/"));
}
@Test
public void testEmpireUtil() throws Exception {
  SupportsRdfId aId = new SupportsRdfIdImpl();
  assertTrue(EmpireUtil.asResource(aId) == null);

  Resource aRes = EmpireUtil.asResource(new SupportsRdfIdImpl(new SupportsRdfId.BNodeKey("asdf")));
  assertTrue(aRes instanceof BNode);
  assertEquals(((BNode) aRes).getID(), "asdf");

  aId = EmpireUtil.asSupportsRdfId(java.net.URI.create("urn:foo"));
  assertTrue(aId.getRdfId() instanceof SupportsRdfId.URIKey);
  assertEquals(aId.getRdfId().value(), java.net.URI.create("urn:foo"));

  assertTrue(EmpireUtil.getNamedGraph("") == null);

  SupportsRdfId.RdfKey aKey = EmpireUtil.asPrimaryKey(new URL("http://example.org"));
  assertTrue(aKey instanceof SupportsRdfId.URIKey);
  assertEquals(aKey.value(), new URL("http://example.org").toURI());

  BNode aAnon = ValueFactoryImpl.getInstance().createBNode("foobar");
  aKey = EmpireUtil.asPrimaryKey(aAnon);
  assertTrue(aKey instanceof SupportsRdfId.BNodeKey);
  assertEquals(aKey.value(), "foobar");
}
/**
 * Constructor.
 *
 * @param namespace the namespace URI prefix.
 */
public Vocabulary(String namespace) {
  try {
    this.namespace = ValueFactoryImpl.getInstance().createURI(namespace);
  } catch (Exception e) {
    throw new IllegalArgumentException("Invalid namespace '" + namespace + "'", e);
  }
}
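A minimal, hypothetical usage sketch for the constructor above: ValueFactoryImpl.getInstance().createURI rejects strings that are not absolute URIs, and the constructor rewraps that failure as an IllegalArgumentException. The test method name is illustrative only.

@Test
public void testVocabularyRejectsInvalidNamespace() {
  // Valid absolute URI: accepted and stored as the vocabulary namespace.
  new Vocabulary("http://example.org/ns#");
  try {
    // No scheme/colon, so createURI(...) fails and the constructor rewraps the error.
    new Vocabulary("not a uri");
    fail("expected IllegalArgumentException for an invalid namespace");
  } catch (IllegalArgumentException expected) {
    // expected
  }
}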
static {
  ValueFactory factory = ValueFactoryImpl.getInstance();
  REPOSITORY_CONTEXT = factory.createURI(NAMESPACE, "RepositoryContext");
  REPOSITORY = factory.createURI(NAMESPACE, "Repository");
  REPOSITORYID = factory.createURI(NAMESPACE, "repositoryID");
  REPOSITORYIMPL = factory.createURI(NAMESPACE, "repositoryImpl");
  REPOSITORYTYPE = factory.createURI(NAMESPACE, "repositoryType");
  DELEGATE = factory.createURI(NAMESPACE, "delegate");
}
static {
  ValueFactory vf = ValueFactoryImpl.getInstance();
  RESULTSET = vf.createURI(NAMESPACE, "ResultSet");
  RESULTVARIABLE = vf.createURI(NAMESPACE, "resultVariable");
  SOLUTION = vf.createURI(NAMESPACE, "solution");
  BINDING = vf.createURI(NAMESPACE, "binding");
  VALUE = vf.createURI(NAMESPACE, "value");
  VARIABLE = vf.createURI(NAMESPACE, "variable");
  BOOLEAN = vf.createURI(NAMESPACE, "boolean");
  TRUE = vf.createLiteral(true);
  FALSE = vf.createLiteral(false);
}
@Test
public void testTimesTwo() throws InvalidRdfException {
  TestDoubleImpl obj = new TestDoubleImpl();
  Graph g = RdfGenerator.asRdf(obj);
  assertEquals(
      1,
      new ExtGraph(g)
          .getValues(
              EmpireUtil.asResource(obj),
              ValueFactoryImpl.getInstance().createURI(PrefixMapping.GLOBAL.uri("test:foo")))
          .size());
}
/**
 * Loads RDF contents from the specified file into the repository.
 *
 * @param filePath Path to data file.
 * @throws RepositoryException If no connection could be established.
 */
public void loadFromFile(String filePath) throws RepositoryException {
  RepositoryConnection conn = repo.getConnection();
  try {
    conn.add(
        new File(filePath),
        null,
        RDFFormat.forFileName(filePath),
        ValueFactoryImpl.getInstance().createURI("urn:defaultContext"));
  } catch (RDFParseException | IOException e) {
    throw new RuntimeException(e);
  } finally {
    conn.close();
  }
}
public void sendDataToGraphDB(File rdfFile) throws RepositoryException, IOException {
  ValueFactory factory = ValueFactoryImpl.getInstance();
  RepositoryConnection con = httpRepository.getConnection();
  // Read the file line by line and add the statements in batches.
  BufferedReader br = new BufferedReader(new FileReader(rdfFile));
  try {
    String line;
    int i = 0;
    while ((line = br.readLine()) != null) {
      // Split the line into subject, predicate and object tokens.
      String subjectString = line.substring(0, line.indexOf(" "));
      line = line.substring(line.indexOf(" ")).trim();
      subjectString = trimString(subjectString);
      String predicateString = line.substring(0, line.indexOf(" "));
      line = line.substring(line.indexOf(" ")).trim();
      predicateString = trimString(predicateString);
      String objectString = line.substring(0, line.lastIndexOf("."));
      objectString = trimString(objectString);
      // Build the statement; objects that do not look like URIs become plain literals.
      URI subject = factory.createURI(subjectString);
      URI predicate = factory.createURI(predicateString);
      Value object =
          objectString.startsWith(JsonToRDF.URI_START_WITH)
              ? factory.createURI(objectString)
              : factory.createLiteral(objectString);
      con.add(subject, predicate, object);
      i++;
      // Commit every 1,000,000 statements to keep the pending transaction bounded.
      if (i >= 1000000) {
        con.commit();
        i = 0;
      }
    }
    // Commit whatever remains from the last, partial batch.
    con.commit();
  } finally {
    br.close();
    con.close();
  }
}
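For comparison, a hedged sketch of the same load using Sesame's built-in Rio parsing via RepositoryConnection.add instead of splitting lines by hand. It assumes the file is plain N-Triples and that the repository supports explicit transactions (Sesame 2.7+); the GraphDBLoader class name is illustrative.

import java.io.File;
import java.io.IOException;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParseException;

public class GraphDBLoader {
  private final Repository httpRepository;

  public GraphDBLoader(Repository httpRepository) {
    this.httpRepository = httpRepository;
  }

  /** Loads an N-Triples file in a single transaction using Sesame's Rio parser. */
  public void load(File rdfFile) throws RepositoryException, RDFParseException, IOException {
    RepositoryConnection con = httpRepository.getConnection();
    try {
      con.begin();                                // explicit transaction (Sesame 2.7+)
      con.add(rdfFile, null, RDFFormat.NTRIPLES); // Rio handles escaping, datatypes, language tags
      con.commit();
    } finally {
      con.close();
    }
  }
}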
/** Create a new ContextAwareValueFactory wrapping {@link ValueFactoryImpl#getInstance()}. */
public ContextAwareValueFactory() {
  this(ValueFactoryImpl.getInstance());
}
/** * Test data and utilities. * * @author Andreas Wagner * @author Andrea Gazzarini * @since 1.0 */ public abstract class WebTestUtils { public static class LineSeparator { // Windows: \r\n Unix: \n Mac: \r public static final String WIN = "\r\n", MAC = "\r", UNIX = "\n"; public static final String[] ALL = new String[] {WIN, MAC, UNIX}; } public static final DataAccessLayerFactory T_DATA_ACCESS_LAYER_FACTORY = DataAccessLayerFactory.getDefaultDataAccessLayerFactory(StorageLayout.TRIPLE); public static final DataAccessLayerFactory Q_DATA_ACCESS_LAYER_FACTORY = DataAccessLayerFactory.getDefaultDataAccessLayerFactory(StorageLayout.QUAD); static final ValueFactory VALUE_FACTORY = ValueFactoryImpl.getInstance(); public static final Random RANDOMIZER = new Random(System.currentTimeMillis()); public static final String[] EMPTY_STRINGS = {"", " ", "\n\r", "\t"}; private static final String TMP_FILE = "./target/testing/tmp.txt"; public static final String EXAMPLE_CONFIG_FILE = "/cumulus.yaml"; public static final Value[] SELECT_ALL_TRIPLES_PATTERN = {null, null, null}; public static final Value[] SELECT_ALL_QUADS_PATTERN = {null, null, null, null}; public static final String STORE_LISTEN_ADDRESS = "localhost:9161"; public static final String TRIPLE_STORE_KEYSPACE_NAME = "KeyspaceCumulusTriple"; public static final String QUAD_STORE_KEYSPACE_NAME = "KeyspaceCumulusQuad"; /** * Converts a given iterator in a list. * * @param <T> * @param iterator the iterator. * @return a list containing elements found in the iterator. */ public static <T> List<T> asList(final Iterator<T> iterator) { final List<T> result = new ArrayList<T>(); while (iterator.hasNext()) { result.add(iterator.next()); } return result; } /** Copied from org.openrdf.query.QueryResultUtil */ private static boolean bindingSetsMatch(final BindingSet bs1, final BindingSet bs2) { if (bs1.size() != bs2.size()) { return false; } for (Binding binding1 : bs1) { Value value1 = binding1.getValue(); Value value2 = bs2.getValue(binding1.getName()); if ((value1 instanceof BNode) && (value2 instanceof BNode)) { // BNode mappedBNode = bNodeMapping.get(value1); // // if (mappedBNode != null) { // // bNode 'value1' was already mapped to some other bNode // if (!value2.equals(mappedBNode)) { // // 'value1' and 'value2' do not match // return false; // } // } else { // // 'value1' was not yet mapped, we need to check if 'value2' // // is a // // possible mapping candidate // if (bNodeMapping.containsValue(value2)) { // // 'value2' is already mapped to some other value. 
// return false; // } // } return value1.equals(value2); } else { // values are not (both) bNodes if ((value1 instanceof Literal) && (value2 instanceof Literal)) { // do literal value-based comparison for supported datatypes Literal leftLit = (Literal) value1; Literal rightLit = (Literal) value2; URI dt1 = leftLit.getDatatype(); URI dt2 = rightLit.getDatatype(); if ((dt1 != null) && (dt2 != null) && dt1.equals(dt2) && XMLDatatypeUtil.isValidValue(leftLit.getLabel(), dt1) && XMLDatatypeUtil.isValidValue(rightLit.getLabel(), dt2)) { Integer compareResult = null; if (dt1.equals(XMLSchema.DOUBLE)) { compareResult = Double.compare(leftLit.doubleValue(), rightLit.doubleValue()); } else if (dt1.equals(XMLSchema.FLOAT)) { compareResult = Float.compare(leftLit.floatValue(), rightLit.floatValue()); } else if (dt1.equals(XMLSchema.DECIMAL)) { compareResult = leftLit.decimalValue().compareTo(rightLit.decimalValue()); } else if (XMLDatatypeUtil.isIntegerDatatype(dt1)) { compareResult = leftLit.integerValue().compareTo(rightLit.integerValue()); } else if (dt1.equals(XMLSchema.BOOLEAN)) { Boolean leftBool = Boolean.valueOf(leftLit.booleanValue()); Boolean rightBool = Boolean.valueOf(rightLit.booleanValue()); compareResult = leftBool.compareTo(rightBool); } else if (XMLDatatypeUtil.isCalendarDatatype(dt1)) { XMLGregorianCalendar left = leftLit.calendarValue(); XMLGregorianCalendar right = rightLit.calendarValue(); compareResult = left.compare(right); } if (compareResult != null) { if (compareResult.intValue() != 0) { return false; } } else if (!value1.equals(value2)) { return false; } } else if (!value1.equals(value2)) { return false; } } else if (!value1.equals(value2)) { return false; } } } return true; } /** * Builds a literal with the given data. * * @param data the literal value. * @return a literal. */ public static Literal buildLiteral(final String data) { return VALUE_FACTORY.createLiteral(data); } /** * Builds a datatyped literal. * * @param data the literal value. * @param datatype the literal type. * @return a datatyped literal. */ public static Literal buildLiteral(final String data, final URI datatype) { return VALUE_FACTORY.createLiteral(data, datatype); } /** * Builds a new {@link URI} from a given string. * * @param value the uri as a string. * @return a new {@link URI} resource. */ public static URI buildResource(final String name) { return name.startsWith("http") ? VALUE_FACTORY.createURI(name) : VALUE_FACTORY.createURI("http://cumulus/" + name); } /** * Cleans up the given store. * * @param crdf could be a triple or a quad store. * @throws CumulusStoreException in case the cleanup fails. */ public static void clean(final Store crdf) throws CumulusStoreException { crdf.clear(); assertEquals( "Store seems not empty after issuing a delete * command.", 0, numOfRes( crdf.query( crdf instanceof TripleStore ? 
SELECT_ALL_TRIPLES_PATTERN : SELECT_ALL_QUADS_PATTERN))); } public static String contentAsString(File file, String lineSepartor) throws IOException { BufferedReader br = new BufferedReader(new FileReader(file)); String line = null; StringBuilder sb = new StringBuilder(); while ((line = br.readLine()) != null) { sb.append(line.trim()); sb.append(System.getProperty("line.separator")); } br.close(); return sb.toString(); } public static String idToString(byte[] id) { StringBuilder sb = new StringBuilder(); for (byte b : id) { sb.append(String.format("%02X ", b)); } return sb.toString(); } public static Iterator<Statement> loadNTriplesFromFile(File file) throws FileNotFoundException { return Util.parseNXAsIterator(new FileInputStream(file)); } public static Iterator<Statement> loadNTriplesFromFile(String path) throws FileNotFoundException { return loadNTriplesFromFile(new File(path)); } /** @see org.openrdf.query.QueryResultUtil */ public static boolean matchTupleQueryResults(TupleQueryResult res1, TupleQueryResult res2) throws QueryEvaluationException { List<BindingSet> queryResult1 = Iterations.asList(res1); List<BindingSet> queryResult2 = Iterations.asList(res2); if (queryResult1.size() != queryResult2.size()) { return false; } for (BindingSet bs1 : queryResult1) { boolean hit = false; for (BindingSet bs2 : queryResult2) { if (bindingSetsMatch(bs1, bs2)) { hit = true; break; } } if (!hit) { return false; } } return true; } /** * Returns a new instance of a quad store with default values. * * @return a new instance of a quad store with default values. */ public static final Store newQuadStore() { return new QuadStore(randomString()); } /** * Creates a new statement with the given data. * * @param localSubjectName the local subject name. * @param localPredicateName the local predicate name. * @param localObjectName the local object name. * @param localContextName the local context name. * @return a new statement. */ public static Statement newStatement( final String localSubjectName, final String localPredicateName, final String localObjectName, final String localContextName) { return VALUE_FACTORY.createStatement( buildResource(localSubjectName), buildResource(localPredicateName), buildResource(localObjectName), buildResource(localContextName)); } /** * Returns a new instance of a triple store with default values. * * @return a new instance of a triple store with default values. */ public static final Store newTripleStore() { return new TripleStore(randomString()); } /** * Returns how many triples are in the given iterator. * * @param <T> * @param nodes the iterator. * @return how many triples are in the given iterator. */ public static <T> int numOfRes(final Iterator<T> nodes) { int numOfTriples = 0; while (nodes.hasNext()) { nodes.next(); numOfTriples++; } return numOfTriples; } public static <T> int numOfRes(QueryResult<T> result) throws QueryEvaluationException { int numOfTriples = 0; while (result.hasNext()) { result.next(); numOfTriples++; } return numOfTriples; } public static int printIds(Iterator<byte[][]> ids, PrintStream stream) { if (!ids.hasNext()) { stream.println("nodes iterator empty"); return 0; } int numOfTriples = 0; while (ids.hasNext()) { byte[][] next_triple = ids.next(); if (next_triple.length == 4) { stream.println( idToString(next_triple[0]) + " " + idToString(next_triple[1]) + " " + idToString(next_triple[2]) + " " + idToString(next_triple[3]) + " . 
"); } else { stream.println( idToString(next_triple[0]) + " " + idToString(next_triple[1]) + " " + idToString(next_triple[2]) + " . "); } numOfTriples++; } return numOfTriples; } public static int printNQ(Iterator<Value[]> nodes, PrintStream stream) { if (!nodes.hasNext()) { stream.println("nodes iterator empty"); return 0; } int numOfTriples = 0; while (nodes.hasNext()) { Value[] next_triple = nodes.next(); stream.println( next_triple[0].toString() + " " + next_triple[1].toString() + " " + next_triple[2].toString() + " " + next_triple[3].toString() + " . "); numOfTriples++; } return numOfTriples; } public static int printNT(Iterator<Value[]> nodes, PrintStream stream) { if (!nodes.hasNext()) { stream.println("nodes iterator empty"); return 0; } int numOfTriples = 0; while (nodes.hasNext()) { Value[] next_triple = nodes.next(); stream.println( next_triple[0].toString() + " " + next_triple[1].toString() + " " + next_triple[2].toString() + " . "); numOfTriples++; } return numOfTriples; } /** * Generates a random int. * * @return a random int. */ public static final int randomInt() { return RANDOMIZER.nextInt(); } /** * Generates a random long. * * @return a random long. */ public static final long randomLong() { return RANDOMIZER.nextLong(); } /** * Generates a random string. * * @return a random string. */ public static final String randomString() { return String.valueOf(RANDOMIZER.nextLong()); } /** * Creates a tmp file under build directory. * * @return a reference to a temporary file. * @throws IOException in case the file cannot be created. */ public static File tmpFile() throws IOException { final File tmp = new File(TMP_FILE); final File parentDirectory = tmp.getParentFile(); if (!parentDirectory.exists()) { parentDirectory.mkdirs(); } if (tmp.exists()) { tmp.delete(); } if (tmp.createNewFile()) { return tmp; } else { throw new IOException("File " + tmp.getAbsolutePath() + " cannot be created."); } } }
public class ODSDCatGenerator extends TransformerBase<ODSCleanerConfig> implements ConfigBeanProvider<ODSCleanerConfig> { private static final String odsUrl = "http://data.opendatasupport.eu/"; private static final ValueFactory valueFactory = ValueFactoryImpl.getInstance(); public static final URI rawCatalogPredicate = valueFactory.createURI( "http://data.opendatasupport.eu/ontology/harmonisation.owl#" + "raw_catalog"); private static final URI rawDatasetPredicate = valueFactory.createURI( "http://data.opendatasupport.eu/ontology/harmonisation.owl#" + "raw_dataset"); private String catalogIdentifier = ""; @Override public void transform(Repository repository, URI graph, TransformContext context) throws TransformException { String catalogUrl = odsUrl + "id/catalog/" + catalogIdentifier + '/'; context.getCustomData().put("dcatTransformerGraph", catalogUrl); Collection<String> warnings = context.getWarnings(); URI catalogUri = valueFactory.createURI(catalogUrl); Value rawGraph = getRawGraph(repository, graph); if (rawGraph == null) { warnings.add("no catalog found"); throw new TransformException("no catalog found in raw data"); } try { RepositoryConnection connection = repository.getConnection(); try { connection.add( valueFactory.createStatement(catalogUri, rawCatalogPredicate, rawGraph), graph); connection.add( valueFactory.createStatement( catalogUri, LODMSPredicates.RDFTYPE, LODMSPredicates.DCAT_CATALOG), graph); copyCatalogAttributes(graph, catalogUri, rawGraph, connection); extractDatasetInfo(graph, catalogUri, rawGraph, connection); } catch (RepositoryException e) { warnings.add(e.getMessage()); throw new TransformException(e.getMessage(), e); } catch (MalformedQueryException e) { warnings.add(e.getMessage()); throw new TransformException(e.getMessage(), e); } catch (UpdateExecutionException e) { warnings.add(e.getMessage()); throw new TransformException(e.getMessage(), e); } finally { connection.close(); } } catch (Exception e) { throw new TransformException(e.getMessage(), e); } } private void extractDatasetInfo( URI graph, org.openrdf.model.Resource catalogUri, Value rawGraph, RepositoryConnection connection) throws RepositoryException, DatatypeConfigurationException { RepositoryResult<Statement> statements = connection.getStatements(null, LODMSPredicates.DCAT_CAT_PROP_DATASET, null, false, graph); Collection<Statement> statementList = statements.asList(); for (Statement s : statementList) { Value rawDatasetUrl = s.getObject(); String rawDatasetId = getrawDatasetId(rawDatasetUrl); URI catalogRecordUri = valueFactory.createURI(catalogUri.toString() + "record/" + rawDatasetId); URI harmonizedDatasetUri = valueFactory.createURI(catalogUri.toString() + "dataset/" + rawDatasetId); connection.add( valueFactory.createStatement( catalogUri, LODMSPredicates.DCAT_CAT_PROP_RECORD, catalogRecordUri), graph); connection.add( valueFactory.createStatement( catalogRecordUri, LODMSPredicates.FOAF_PRIMARYTOPIC, harmonizedDatasetUri), graph); connection.add( valueFactory.createStatement( catalogRecordUri, LODMSPredicates.RDFTYPE, LODMSPredicates.DCAT_CATALOGRECORD), graph); connection.add( valueFactory.createStatement(catalogRecordUri, rawDatasetPredicate, rawDatasetUrl), graph); connection.add( valueFactory.createStatement( catalogRecordUri, LODMSPredicates.DCT_MODIFIED, valueFactory.createLiteral(getXMLNow()))); connection.add( valueFactory.createStatement( harmonizedDatasetUri, LODMSPredicates.RDFTYPE, LODMSPredicates.DCAT_DATASET), graph); connection.add( valueFactory.createStatement( catalogUri, 
LODMSPredicates.DCAT_CAT_PROP_DATASET, harmonizedDatasetUri), graph); } statements.close(); } private String getrawDatasetId(Value rawDatasetUrl) { String dataset = rawDatasetUrl.stringValue(); int datasetOffset = dataset.lastIndexOf("dataset"); if (datasetOffset >= 0) { return dataset.substring(datasetOffset + 8); } return dataset.substring(dataset.lastIndexOf("/") + 1); } private static XMLGregorianCalendar getXMLNow() throws DatatypeConfigurationException { GregorianCalendar gregorianCalendar = new GregorianCalendar(); DatatypeFactory datatypeFactory = null; datatypeFactory = DatatypeFactory.newInstance(); return datatypeFactory.newXMLGregorianCalendar(gregorianCalendar); } private void copyCatalogAttributes( Value graph, Value catalogUri, Value rawGraph, RepositoryConnection connection) throws RepositoryException, MalformedQueryException, UpdateExecutionException { String query = "insert into ?graph { ?catalogUri ?p ?t. }\n" + " where { graph ?graph {\n" + " ?rawCatalogUri ?p ?t." + "}}"; Update u = connection.prepareUpdate(QueryLanguage.SPARQL, query); u.setBinding("catalogUri", catalogUri); u.setBinding("graph", graph); u.setBinding("rawCatalogUri", rawGraph); u.setBinding("p", LODMSPredicates.DCT_PUBLISHER); u.execute(); u.setBinding("p", LODMSPredicates.DCT_DESCRIPTION); u.execute(); u.setBinding("p", LODMSPredicates.DCT_TITLE); u.execute(); } private Value getRawGraph(Repository repository, URI graph) throws TransformException { RepositoryResult<Statement> s = null; try { RepositoryConnection connection = repository.getConnection(); try { List<Statement> catalogStatement = connection .getStatements( null, LODMSPredicates.RDFTYPE, LODMSPredicates.DCAT_CATALOG, false, graph) .asList(); if (catalogStatement.isEmpty()) return null; return catalogStatement.get(0).getSubject(); } catch (RepositoryException e) { connection.close(); return null; } } catch (RepositoryException e) { throw new TransformException(e.getMessage(), e); } } @Override public String getName() { return "ODS DCAT Application Profile Harmonizer"; } @Override public String getDescription() { return "Add this plugin to a DCAT harmonization pipeline to create an initial DCAT structure for each dataset in the pipeline."; } @Override public Resource getIcon(Application application) { return new ClassResource("/com/tenforce/lodms/transform/ods.png", application); } @Override public String asString() { return getName(); } @Override public ODSCleanerConfig newDefaultConfig() { return new ODSCleanerConfig(); } @SuppressWarnings("ParameterHidesMemberVariable") @Override protected void configureInternal(ODSCleanerConfig config) throws ConfigurationException { catalogIdentifier = config.getCatalogIdentifier(); } }
static { ValueFactory factory = ValueFactoryImpl.getInstance(); Community = factory.createURI(SIOC.NAMESPACE, "Community"); Container = factory.createURI(SIOC.NAMESPACE, "Container"); Forum = factory.createURI(SIOC.NAMESPACE, "Forum"); Item = factory.createURI(SIOC.NAMESPACE, "Item"); Post = factory.createURI(SIOC.NAMESPACE, "Post"); Role = factory.createURI(SIOC.NAMESPACE, "Role"); Site = factory.createURI(SIOC.NAMESPACE, "Site"); Space = factory.createURI(SIOC.NAMESPACE, "Space"); Thread = factory.createURI(SIOC.NAMESPACE, "Thread"); User = factory.createURI(SIOC.NAMESPACE, "User"); UserAccount = factory.createURI(SIOC.NAMESPACE, "UserAccount"); Usergroup = factory.createURI(SIOC.NAMESPACE, "Usergroup"); about = factory.createURI(SIOC.NAMESPACE, "about"); account_of = factory.createURI(SIOC.NAMESPACE, "account_of"); addressed_to = factory.createURI(SIOC.NAMESPACE, "addressed_to"); administrator_of = factory.createURI(SIOC.NAMESPACE, "administrator_of"); attachment = factory.createURI(SIOC.NAMESPACE, "attachment"); avatar = factory.createURI(SIOC.NAMESPACE, "avatar"); container_of = factory.createURI(SIOC.NAMESPACE, "container_of"); content = factory.createURI(SIOC.NAMESPACE, "content"); content_encoded = factory.createURI(SIOC.NAMESPACE, "content_encoded"); created_at = factory.createURI(SIOC.NAMESPACE, "created_at"); creator_of = factory.createURI(SIOC.NAMESPACE, "creator_of"); description = factory.createURI(SIOC.NAMESPACE, "description"); earlier_version = factory.createURI(SIOC.NAMESPACE, "earlier_version"); email = factory.createURI(SIOC.NAMESPACE, "email"); email_sha1 = factory.createURI(SIOC.NAMESPACE, "email_sha1"); embeds_knowledge = factory.createURI(SIOC.NAMESPACE, "embeds_knowledge"); feed = factory.createURI(SIOC.NAMESPACE, "feed"); first_name = factory.createURI(SIOC.NAMESPACE, "first_name"); follows = factory.createURI(SIOC.NAMESPACE, "follows"); function_of = factory.createURI(SIOC.NAMESPACE, "function_of"); group_of = factory.createURI(SIOC.NAMESPACE, "group_of"); has_administrator = factory.createURI(SIOC.NAMESPACE, "has_administrator"); has_container = factory.createURI(SIOC.NAMESPACE, "has_container"); has_creator = factory.createURI(SIOC.NAMESPACE, "has_creator"); has_discussion = factory.createURI(SIOC.NAMESPACE, "has_discussion"); has_function = factory.createURI(SIOC.NAMESPACE, "has_function"); has_group = factory.createURI(SIOC.NAMESPACE, "has_group"); has_host = factory.createURI(SIOC.NAMESPACE, "has_host"); has_member = factory.createURI(SIOC.NAMESPACE, "has_member"); has_moderator = factory.createURI(SIOC.NAMESPACE, "has_moderator"); has_modifier = factory.createURI(SIOC.NAMESPACE, "has_modifier"); has_owner = factory.createURI(SIOC.NAMESPACE, "has_owner"); has_parent = factory.createURI(SIOC.NAMESPACE, "has_parent"); has_part = factory.createURI(SIOC.NAMESPACE, "has_part"); has_reply = factory.createURI(SIOC.NAMESPACE, "has_reply"); has_scope = factory.createURI(SIOC.NAMESPACE, "has_scope"); has_space = factory.createURI(SIOC.NAMESPACE, "has_space"); has_subscriber = factory.createURI(SIOC.NAMESPACE, "has_subscriber"); has_usergroup = factory.createURI(SIOC.NAMESPACE, "has_usergroup"); host_of = factory.createURI(SIOC.NAMESPACE, "host_of"); id = factory.createURI(SIOC.NAMESPACE, "id"); ip_address = factory.createURI(SIOC.NAMESPACE, "ip_address"); last_activity_date = factory.createURI(SIOC.NAMESPACE, "last_activity_date"); last_item_date = factory.createURI(SIOC.NAMESPACE, "last_item_date"); last_name = factory.createURI(SIOC.NAMESPACE, "last_name"); 
last_reply_date = factory.createURI(SIOC.NAMESPACE, "last_reply_date"); later_version = factory.createURI(SIOC.NAMESPACE, "later_version"); latest_version = factory.createURI(SIOC.NAMESPACE, "latest_version"); link = factory.createURI(SIOC.NAMESPACE, "link"); links_to = factory.createURI(SIOC.NAMESPACE, "links_to"); member_of = factory.createURI(SIOC.NAMESPACE, "member_of"); moderator_of = factory.createURI(SIOC.NAMESPACE, "moderator_of"); modified_at = factory.createURI(SIOC.NAMESPACE, "modified_at"); modifier_of = factory.createURI(SIOC.NAMESPACE, "modifier_of"); name = factory.createURI(SIOC.NAMESPACE, "name"); next_by_date = factory.createURI(SIOC.NAMESPACE, "next_by_date"); next_version = factory.createURI(SIOC.NAMESPACE, "next_version"); note = factory.createURI(SIOC.NAMESPACE, "note"); num_authors = factory.createURI(SIOC.NAMESPACE, "num_authors"); num_items = factory.createURI(SIOC.NAMESPACE, "num_items"); num_replies = factory.createURI(SIOC.NAMESPACE, "num_replies"); num_threads = factory.createURI(SIOC.NAMESPACE, "num_threads"); num_views = factory.createURI(SIOC.NAMESPACE, "num_views"); owner_of = factory.createURI(SIOC.NAMESPACE, "owner_of"); parent_of = factory.createURI(SIOC.NAMESPACE, "parent_of"); part_of = factory.createURI(SIOC.NAMESPACE, "part_of"); previous_by_date = factory.createURI(SIOC.NAMESPACE, "previous_by_date"); previous_version = factory.createURI(SIOC.NAMESPACE, "previous_version"); reference = factory.createURI(SIOC.NAMESPACE, "reference"); related_to = factory.createURI(SIOC.NAMESPACE, "related_to"); reply_of = factory.createURI(SIOC.NAMESPACE, "reply_of"); scope_of = factory.createURI(SIOC.NAMESPACE, "scope_of"); sibling = factory.createURI(SIOC.NAMESPACE, "sibling"); space_of = factory.createURI(SIOC.NAMESPACE, "space_of"); subject = factory.createURI(SIOC.NAMESPACE, "subject"); subscriber_of = factory.createURI(SIOC.NAMESPACE, "subscriber_of"); title = factory.createURI(SIOC.NAMESPACE, "title"); topic = factory.createURI(SIOC.NAMESPACE, "topic"); usergroup_of = factory.createURI(SIOC.NAMESPACE, "usergroup_of"); }
/**
 * Creates a URI.
 *
 * @param namespace the namespace part of the URI.
 * @param localName the local name part of the URI.
 * @return the URI composed of the given namespace and local name.
 */
private URI createURI(String namespace, String localName) {
  return ValueFactoryImpl.getInstance().createURI(namespace, localName);
}
/**
 * Creates a URI.
 *
 * @param uriStr the URI string
 * @return the URI instance.
 */
protected URI createURI(String uriStr) {
  return ValueFactoryImpl.getInstance().createURI(uriStr);
}
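As an aside, the two createURI helpers above are interchangeable for well-formed input: in Sesame, URIs compare by their full string value, so a URI built from (namespace, localName) equals one built from the concatenated string. A small illustrative check:

URI a = ValueFactoryImpl.getInstance().createURI("http://example.org/ns#", "Thing");
URI b = ValueFactoryImpl.getInstance().createURI("http://example.org/ns#Thing");
assertEquals(a, b);                                           // equal by string value
assertEquals("http://example.org/ns#Thing", a.stringValue()); // namespace + localName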
/** * This is an odd issue someone reported for the trunk. There are two version of a plain Literal * <code>Brian McCarthy</code>, but it appears that one of the two versions has a leading bell * character when you decode the Unicode byte[]. I think that this is actually an issue with the * {@link Locale} and the Unicode sort key generation. If {@link KeyBuilder} as configured on the * system generates Unicode sort keys which compare as EQUAL for these two inputs then that will * cause the lexicon to report an "apparent" inconsistency. In fact, what we probably need to do * is just disable the inconsistency check in the lexicon. * * <pre> * ERROR: com.bigdata.rdf.lexicon.Id2TermWriteProc.apply(Id2TermWriteProc.java:205): val=[0, 2, 0, 14, 66, 114, 105, 97, 110, 32, 77, 99, 67, 97, 114, 116, 104, 121] * ERROR: com.bigdata.rdf.lexicon.Id2TermWriteProc.apply(Id2TermWriteProc.java:206): oldval=[0, 2, 0, 15, 127, 66, 114, 105, 97, 110, 32, 77, 99, 67, 97, 114, 116, 104, 121] * </pre> */ public void test_consistencyIssue() { final BigdataValueSerializer<Value> fixture = new BigdataValueSerializer<Value>(ValueFactoryImpl.getInstance()); final byte[] newValBytes = new byte[] {0, 2, 0, 14, 66, 114, 105, 97, 110, 32, 77, 99, 67, 97, 114, 116, 104, 121}; final byte[] oldValBytes = new byte[] { 0, 2, 0, 15, 127, 66, 114, 105, 97, 110, 32, 77, 99, 67, 97, 114, 116, 104, 121 }; final Value newValue = fixture.deserialize(newValBytes); final Value oldValue = fixture.deserialize(oldValBytes); if (log.isInfoEnabled()) { log.info("new=" + newValue); log.info("old=" + oldValue); } /* * Note: This uses the default Locale and the implied Unicode collation * order to generate the sort keys. */ // final IKeyBuilder keyBuilder = new KeyBuilder(); /* * Note: This allows you to explicitly configure the behavior of the * KeyBuilder instance based on the specified properties. If you want * your KB to run with these properties, then you need to specify them * either in your environment or using -D to java. */ final Properties properties = new Properties(); // specify that all aspects of the Unicode sequence are significant. properties.setProperty(KeyBuilder.Options.STRENGTH, StrengthEnum.Identical.toString()); // // specify that that only primary character differences are significant. // properties.setProperty(KeyBuilder.Options.STRENGTH,StrengthEnum.Primary.toString()); final IKeyBuilder keyBuilder = KeyBuilder.newUnicodeInstance(properties); final LexiconKeyBuilder lexKeyBuilder = new LexiconKeyBuilder(keyBuilder); // encode as unsigned byte[] key. final byte[] newValKey = lexKeyBuilder.value2Key(newValue); final byte[] oldValKey = lexKeyBuilder.value2Key(oldValue); if (log.isInfoEnabled()) { log.info("newValKey=" + BytesUtil.toString(newValKey)); log.info("oldValKey=" + BytesUtil.toString(oldValKey)); } /* * Note: if this assert fails then the two distinct Literals were mapped * onto the same unsigned byte[] key. */ assertFalse(BytesUtil.bytesEqual(newValKey, oldValKey)); }
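The collation-strength effect described in the Javadoc above can be reproduced with the plain JDK, independently of bigdata's KeyBuilder. The sketch below is illustrative only: it uses java.text.Collator rather than the configured KeyBuilder, and whether the leading control character is ignored at PRIMARY strength depends on the default Locale's collation rules.

import java.text.Collator;

public class CollationStrengthDemo {
  public static void main(String[] args) {
    String plain = "Brian McCarthy";
    String prefixed = "\u007FBrian McCarthy"; // same text with a leading control character

    Collator collator = Collator.getInstance();

    collator.setStrength(Collator.PRIMARY);   // only primary character differences are significant
    System.out.println("PRIMARY:   " + collator.compare(plain, prefixed));

    collator.setStrength(Collator.IDENTICAL); // every code point difference is significant
    System.out.println("IDENTICAL: " + collator.compare(plain, prefixed));
  }
}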
/** * Test method for {@link * RDFJSONUtility.kmr.scam.rest.util.RDFJSON#graphToRdfJsonPreordered(java.util.Set, * java.io.Writer)} . * * @throws JSONException */ @Test public void testModelToRdfJsonPreorderedSetOfStatementWriter() throws Exception { // final Set<Statement> testStatements = new TreeSet<Statement>(new StatementComparator()); final Model testStatements = new TreeModel(); final ValueFactoryImpl vf = ValueFactoryImpl.getInstance(); final BNode testBNode1 = vf.createBNode(); final BNode testBNode2 = vf.createBNode(); final URI testURI1 = vf.createURI("http://example.org/test/rdf/json/1"); final URI testURI2 = vf.createURI("http://my.test.org/rdf/type/2"); final URI testURI3 = vf.createURI("http://example.org/test/rdf/json/3"); final URI testURI4 = vf.createURI("http://example.org/test/rdf/json/4"); final URI testURI5 = vf.createURI("http://my.test.org/rdf/type/5"); final Statement testStatement1 = vf.createStatement(testURI1, testURI2, testURI3); testStatements.add(testStatement1); final Statement testStatement2 = vf.createStatement(testURI1, testURI2, testBNode1); testStatements.add(testStatement2); final Statement testStatement3 = vf.createStatement(testURI1, testURI2, testBNode2); testStatements.add(testStatement3); final Statement testStatement4 = vf.createStatement(testURI4, testURI2, testURI3); testStatements.add(testStatement4); final Statement testStatement5 = vf.createStatement(testURI4, testURI2, testBNode2); testStatements.add(testStatement5); final Statement testStatement6 = vf.createStatement(testURI4, testURI2, testBNode1); testStatements.add(testStatement6); final Statement testStatement7 = vf.createStatement(testBNode1, testURI5, testBNode2); testStatements.add(testStatement7); final Statement testStatement8 = vf.createStatement(testBNode1, testURI5, testURI1); testStatements.add(testStatement8); final Statement testStatement9 = vf.createStatement(testBNode1, testURI5, testURI4); testStatements.add(testStatement9); // RDFJSONUnitTest.log.info("testStatements=" + testStatements); Assert.assertEquals(9, testStatements.size()); // Verify that the statements are in an acceptable order (testStatement5 and testStatement6 // can be legitimately swapped) final Iterator<Statement> testStatementIterator = testStatements.iterator(); Assert.assertTrue(testStatementIterator.hasNext()); // testStatement7 should always be first by virtue of the fact that it has two blank nodes // and no other statements have two blank nodes Assert.assertEquals(testStatement7, testStatementIterator.next()); Assert.assertTrue(testStatementIterator.hasNext()); // Then testStatement8 Assert.assertEquals(testStatement8, testStatementIterator.next()); Assert.assertTrue(testStatementIterator.hasNext()); // Then testStatement9 Assert.assertEquals(testStatement9, testStatementIterator.next()); Assert.assertTrue(testStatementIterator.hasNext()); Rio.write(testStatements, this.testWriter, RDFFormat.RDFJSON); // RDFJSONUtility.modelToRdfJson(testStatements, this.testWriter, this.testWriterConfig); this.testOutput = this.testWriter.toString(); Assert.assertTrue(this.testOutput.length() > 0); Assert.assertTrue(this.testOutput.startsWith("{")); Assert.assertTrue(this.testOutput.endsWith("}")); // RDFJSONUnitTest.log.info("testOutput=" + this.testOutput); final int firstBlankNode = this.testOutput.indexOf("\"_:"); // Test that a bnode exists after the opening brace Assert.assertTrue(firstBlankNode > 0); // The first value after the first blank node should be a blank node identifier final int firstValue = 
this.testOutput.indexOf("\"value\" : \"_:", firstBlankNode); Assert.assertTrue("A suitable blank node value was not found", firstValue > 0); // This should be guaranteed by the indexOf contract, but doing a quick check anyway Assert.assertTrue(firstValue > firstBlankNode); // Do a quick check to see if the testOutput is valid JSON // FIXME: TODO: Test using Jackson // Assert.fail("TODO: Implement me using Jackson"); // final JSONObject testJSONObject = new JSONObject(this.testOutput); // Assert.assertNotNull(testJSONObject); // Assert.assertTrue(testJSONObject.length() > 0); // Assert.assertTrue(testJSONObject.names().length() > 0); // Assert.assertTrue(testJSONObject.keys().hasNext()); }
public class Component implements Comparable<Component> { private int removalDepth; // 0 private String path; // component/1 private Vertex delegate; // http:...dbpedia private int delegateDegree; // private Component primarySubcomponent; // {component/1/3} private HashSet<Component> secondarySubcomponents = new HashSet<Component>(); // {component/1/1} // {component/1/5} private HashSet<Component> subs = null; // Union of primary and secondary. private HashSet<Vertex> leafs = new HashSet<Vertex>(); // http:...linked-cruchbase, // http:...data-cnr-it // ... protected static ValueFactory vf = ValueFactoryImpl.getInstance(); /** @param path */ public Component(int removalDepth, String path, Vertex delegate, int delegateDegree) { this.removalDepth = removalDepth; this.path = path; this.delegate = delegate; this.delegateDegree = delegateDegree; System.err.println( "Snapping " + delegate.getProperty("component") + " delegate " + delegate.getId().toString() + " (with " + delegateDegree + " connections)" + " at depth " + removalDepth); } public String getPath() { return this.path; } public int getRemovalDepth() { return this.removalDepth; } public Vertex getDelegate() { return this.delegate; } public long getDelegateDegree() { return this.delegateDegree; } public void setPrimarySubcomponent(Component sub) { if (null != sub && null != sub.getPath() && null != sub.getDelegate()) { this.primarySubcomponent = sub; } // System.err.println(this.getPath()+"("+this.getDelegate().getId()+")'s *primary* subcomponent: // "+sub.getPath()+"("+sub.getDelegate().getId()+")"); } public void setSecondarySubcomponent(Collection<Component> subs) { for (Component sub : subs) { if (null != sub && null != sub.getPath() && null != sub.getDelegate()) { this.secondarySubcomponents.add(sub); } // System.err.println(this.getPath()+"("+this.getDelegate().getId()+")'s secondary // subcomponent: "+sub.getPath()+"("+sub.getDelegate().getId()+")"); } } // // // public Set<Vertex> getLeafs() { return this.leafs; } /** @return both the primary and secondary components (without a distinction). 
*/ public Set<Component> getSubcomponents() { if (subs == null) { subs = new HashSet<Component>(secondarySubcomponents); if (null != this.primarySubcomponent) { subs.add(this.primarySubcomponent); } } return subs; } /** @return */ public Component getPrimarySubcomponent() { return this.primarySubcomponent; } /** @return */ public Set<Component> getSecondarySubcomponents() { return this.secondarySubcomponents; } /** * @param id * @return */ public int addLeaf(Vertex id) { this.leafs.add(id); return leafs.size(); } @Override public int compareTo(Component o) { return o.delegateDegree - this.delegateDegree; } /** * @param i * @param size * @return */ public void describe(PrintStream out) { describe(out, ""); } /** * @param out * @param indent */ private void describe(PrintStream out, String indent) { out.print( "\n" + indent + "When " + getPath() + "'s delegate <" + getDelegate().getId() + "> was removed,\n" + indent + " it flung " + getLeafs().size() + " individual nodes\n" + indent + " and caused " + getSubcomponents().size() + " isolated connected components:\n"); // Primary if (null != getPrimarySubcomponent()) { out.print(indent + " " + getPrimarySubcomponent().getPath()); getPrimarySubcomponent().describe(out, indent + " "); } else { // out.println(indent+" (no primary subcomponent) "+getPrimarySubcomponent()); } // Secondary if (getSecondarySubcomponents().size() > 0) { for (Component sub : getSecondarySubcomponents()) { out.print(indent + " " + sub.getPath()); sub.describe(out, indent + " "); } } else { // out.println(indent+" (no secondary subcomponents) "+getSubcomponents()); } } /** * @param out * @param conn * @param root * @param base */ public void describe(String base, RepositoryConnection conn, Resource reportR, PrintStream out) { describe(base, "", conn, reportR, out, null); } /** * @param base - grows with each call down. * @param indent - * @param conn - * @param reportR - named graph to write into. * @param out - * @param root - stays the same, so each node can point to its root. 
*/ private void describe( String base, String indent, RepositoryConnection conn, Resource reportR, PrintStream out, Resource root) { out.print( "\n" + indent + "When " + getPath() + "'s delegate <" + getDelegate().getId() + "> was removed,\n" + indent + " it flung " + getLeafs().size() + " individual nodes\n" + indent + " and caused " + getSubcomponents().size() + " isolated connected components:\n"); Resource component = vf.createURI(base + "/" + getPath()); try { conn.add(component, RDF.a, PROVO.Collection, reportR); if (this.removalDepth == 0) { conn.add(component, RDF.a, VSR.Root, reportR); root = component; } conn.add(component, Centrifuge.hasRoot, root, reportR); conn.add(component, DCTerms.identifier, vf.createLiteral(getPath()), reportR); conn.add( component, PROVO.specializationOf, vf.createURI(this.delegate.getId().toString()), reportR); conn.add(component, VSR.depth, vf.createLiteral(this.removalDepth), reportR); for (Vertex leaf : this.getLeafs()) { Resource leafR = vf.createURI(leaf.getId().toString()); conn.add(component, DCTerms.hasPart, leafR, reportR); } } catch (RepositoryException e) { e.printStackTrace(); } // Primary if (null != getPrimarySubcomponent()) { out.print(indent + " " + getPrimarySubcomponent().getPath()); try { Resource subR = vf.createURI(base + "/" + getPrimarySubcomponent().getPath()); conn.add(component, DCTerms.hasPart, subR, reportR); conn.add(subR, RDF.a, Centrifuge.Primary, reportR); conn.add(subR, RDF.a, PROVO.Collection, reportR); conn.add( subR, DCTerms.identifier, vf.createLiteral(getPrimarySubcomponent().getPath()), reportR); } catch (RepositoryException e) { e.printStackTrace(); } getPrimarySubcomponent().describe(base, indent + " ", conn, reportR, out, root); } else { // out.println(indent+" (no primary subcomponent) "+getPrimarySubcomponent()); } // Secondary if (getSecondarySubcomponents().size() > 0) { for (Component sub : getSecondarySubcomponents()) { Resource subR = vf.createURI(base + "/" + sub.getPath()); try { conn.add(component, DCTerms.hasPart, subR, reportR); } catch (RepositoryException e) { e.printStackTrace(); } } for (Component sub : getSecondarySubcomponents()) { Resource subR = vf.createURI(base + "/" + sub.getPath()); try { conn.add(subR, RDF.a, Centrifuge.Secondary, reportR); conn.add(subR, RDF.a, PROVO.Collection, reportR); conn.add( subR, DCTerms.identifier, vf.createLiteral(getPrimarySubcomponent().getPath()), reportR); conn.commit(); } catch (RepositoryException e) { e.printStackTrace(); } } for (Component sub : getSecondarySubcomponents()) { out.print(indent + " " + sub.getPath()); sub.describe(base, indent + " ", conn, reportR, out, root); } } else { // out.println(indent+" (no secondary subcomponents) "+getSubcomponents()); } } }
static {
  ValueFactory factory = ValueFactoryImpl.getInstance();
  SAILTYPE = factory.createURI(NAMESPACE, "sailType");
  DELEGATE = factory.createURI(NAMESPACE, "delegate");
}