Example #1
  @Override
  public RdfStream getTriples(
      final IdentifierConverter<Resource, FedoraResource> idTranslator,
      final Iterable<? extends Class<? extends RdfStream>> contexts) {
    final RdfStream stream = new RdfStream();

    for (final Class<? extends RdfStream> context : contexts) {
      try {
        // Each context class is expected to expose a (FedoraResource, IdentifierConverter)
        // constructor, which is looked up reflectively and invoked for this resource.
        final Constructor<? extends RdfStream> declaredConstructor =
            context.getDeclaredConstructor(FedoraResource.class, IdentifierConverter.class);

        final RdfStream rdfStream = declaredConstructor.newInstance(this, idTranslator);
        rdfStream.session(getSession());

        stream.concat(rdfStream);
      } catch (final NoSuchMethodException | InstantiationException | IllegalAccessException e) {
        // Shouldn't happen.
        throw propagate(e);
      } catch (final InvocationTargetException e) {
        final Throwable cause = e.getCause();
        if (cause instanceof RepositoryException) {
          throw new RepositoryRuntimeException(cause);
        }
        throw propagate(cause);
      }
    }

    return stream;
  }
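For orientation, a hedged usage sketch of this reflective overload; the helper method name, `resource`, and `translator` are assumptions, while the context classes match those passed to getTriples in the final example below.

  // Hedged usage sketch (not from the original class).
  private RdfStream typeAndPropertyTriples(
      final FedoraResource resource,
      final IdentifierConverter<Resource, FedoraResource> translator) {
    return resource.getTriples(
        translator, ImmutableList.of(TypeRdfContext.class, PropertiesRdfContext.class));
  }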
Example #2
  /* (non-Javadoc)
   * @see org.fcrepo.kernel.api.models.FedoraResource#updateProperties
   *     (org.fcrepo.kernel.api.identifiers.IdentifierConverter, java.lang.String, RdfStream)
   */
  @Override
  public void updateProperties(
      final IdentifierConverter<Resource, FedoraResource> idTranslator,
      final String sparqlUpdateStatement,
      final RdfStream originalTriples)
      throws MalformedRdfException, AccessDeniedException {

    if (!clean(sparqlUpdateStatement)) {
      throw new IllegalArgumentException(
          "Invalid SPARQL UPDATE statement:" + sparqlUpdateStatement);
    }

    final Model model = originalTriples.asModel();

    final JcrPropertyStatementListener listener =
        new JcrPropertyStatementListener(
            idTranslator, getSession(), idTranslator.reverse().convert(this).asNode());

    model.register(listener);

    final UpdateRequest request =
        create(sparqlUpdateStatement, idTranslator.reverse().convert(this).toString());
    model.setNsPrefixes(request.getPrefixMapping());
    execute(request, model);

    listener.assertNoExceptions();
  }
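A hedged caller sketch for updateProperties; the helper name, `resource`, `translator`, and the SPARQL statement are illustrative assumptions. The empty relative IRI <> resolves against the resource URI passed as the base in the create() call above.

  // Hedged usage sketch (not from the original class): apply a simple SPARQL UPDATE,
  // passing the resource's current property triples as the original stream.
  private void setTitle(
      final FedoraResource resource,
      final IdentifierConverter<Resource, FedoraResource> translator)
      throws MalformedRdfException, AccessDeniedException {
    final String sparql =
        "INSERT DATA { <> <http://purl.org/dc/elements/1.1/title> \"Example title\" }";
    resource.updateProperties(
        translator,
        sparql,
        resource.getTriples(translator, ImmutableList.of(PropertiesRdfContext.class)));
  }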
Example #3
  @Before
  public void setup() throws RepositoryException {
    initMocks(this);

    when(mockSession.getWorkspace()).thenReturn(mockWorkspace);
    when(mockWorkspace.getNamespaceRegistry()).thenReturn(mockNamespaceRegistry);
    when(mockNamespaceRegistry.getPrefixes()).thenReturn(new String[] {});

    testData.session(mockSession);
    testData.topic(createURI("test:subject"));
    testData.concat(
        new Triple(
            createURI("test:subject"), createURI("test:predicate"), createLiteral("test:object")));
    testData.concat(
        new Triple(
            createURI("test:subject"),
            createURI(getRDFNamespaceForJcrNamespace(JCR_NAMESPACE) + "primaryType"),
            createLiteral("nt:file")));

    testData2.session(mockSession);
    testData2.topic(createURI("test:subject2"));
    testData2.concat(
        new Triple(
            createURI("test:subject2"),
            createURI(getRDFNamespaceForJcrNamespace(JCR_NAMESPACE) + "mixinTypes"),
            createLiteral("childOf:ntFile")));
    final UriInfo info = Mockito.mock(UriInfo.class);
    setField(testProvider, "uriInfo", info);
  }
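A hedged sketch of a test that could consume these fixtures; it is not part of the original suite and relies only on the iterator-style access used in Example #7 below.

  // Hedged sketch (not in the original suite): both triples concatenated above share the
  // subject "test:subject", so the first element of the stream must carry that subject.
  @Test
  public void testSetupStreamExposesTriples() {
    assertTrue("Expected the fixture stream to contain triples", testData.hasNext());
    assertEquals(createURI("test:subject"), testData.next().getSubject());
  }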
Example #4
  /**
   * This method returns an HTTP response whose content body is built from the given arguments.
   *
   * @param rangeValue starting and ending byte offsets, see {@link Range}
   * @param limit the number of child resources to include in the response, or -1 for all
   * @param rdfStream the stream to which response RDF will be concatenated
   * @return the HTTP response
   * @throws IOException if the resource content cannot be read
   */
  protected Response getContent(final String rangeValue, final int limit, final RdfStream rdfStream)
      throws IOException {
    if (resource() instanceof FedoraBinary) {

      final String contentTypeString = ((FedoraBinary) resource()).getMimeType();

      final Lang lang = contentTypeToLang(contentTypeString);

      if (!contentTypeString.equals("text/plain") && lang != null) {

        final String format = lang.getName().toUpperCase();

        final InputStream content = ((FedoraBinary) resource()).getContent();

        final Model inputModel =
            createDefaultModel().read(content, (resource()).toString(), format);

        rdfStream.concat(Iterators.transform(inputModel.listStatements(), Statement::asTriple));
      } else {

        final MediaType mediaType = MediaType.valueOf(contentTypeString);
        if (MESSAGE_EXTERNAL_BODY.isCompatible(mediaType)
            && mediaType.getParameters().containsKey("access-type")
            && mediaType.getParameters().get("access-type").equals("URL")
            && mediaType.getParameters().containsKey("URL")) {
          try {
            return temporaryRedirect(new URI(mediaType.getParameters().get("URL"))).build();
          } catch (final URISyntaxException e) {
            throw new RepositoryRuntimeException(e);
          }
        }
        return getBinaryContent(rangeValue);
      }

    } else {
      rdfStream.concat(getResourceTriples(limit));
      if (prefer != null) {
        prefer.getReturn().addResponseHeaders(servletResponse);
      }
    }
    servletResponse.addHeader("Vary", "Accept, Range, Accept-Encoding, Accept-Language");

    return ok(rdfStream).build();
  }
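For context, a hedged sketch of a JAX-RS handler that could delegate to getContent; the @GET wiring, method name, and injected session field are assumptions, while translator(), resource(), and the -1 limit follow the code above.

  // Hedged endpoint sketch (not from the original class).
  @GET
  public Response describe(@HeaderParam("Range") final String rangeValue) throws IOException {
    final RdfStream rdfStream =
        new RdfStream().topic(translator().reverse().convert(resource()).asNode());
    rdfStream.session(session); // `session` is assumed to be an injected field
    return getContent(rangeValue, -1, rdfStream);
  }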
Example #5
  /* (non-Javadoc)
   * @see org.fcrepo.kernel.api.models.FedoraResource#replaceProperties
   *     (org.fcrepo.kernel.api.identifiers.IdentifierConverter, com.hp.hpl.jena.rdf.model.Model)
   */
  @Override
  public void replaceProperties(
      final IdentifierConverter<Resource, FedoraResource> idTranslator,
      final Model inputModel,
      final RdfStream originalTriples)
      throws MalformedRdfException {

    final RdfStream replacementStream =
        new RdfStream()
            .namespaces(inputModel.getNsPrefixMap())
            .topic(idTranslator.reverse().convert(this).asNode());

    final GraphDifferencingIterator differencer =
        new GraphDifferencingIterator(inputModel, originalTriples);

    final StringBuilder exceptions = new StringBuilder();
    try {
      new RdfRemover(idTranslator, getSession(), replacementStream.withThisContext(differencer))
          .consume();
    } catch (final ConstraintViolationException e) {
      throw e;
    } catch (final MalformedRdfException e) {
      exceptions.append(e.getMessage());
      exceptions.append("\n");
    }

    try {
      new RdfAdder(
              idTranslator,
              getSession(),
              replacementStream.withThisContext(differencer.notCommon()))
          .consume();
    } catch (final ConstraintViolationException e) {
      throw e;
    } catch (final MalformedRdfException e) {
      exceptions.append(e.getMessage());
    }

    if (exceptions.length() > 0) {
      throw new MalformedRdfException(exceptions.toString());
    }
  }
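A hedged caller sketch for replaceProperties; the helper name, `resource`, `translator`, and the Turtle payload are assumptions, and the base URI follows the resource().toString() pattern used in getContent above.

  // Hedged usage sketch (not from the original class): replace properties from a small
  // Turtle document, letting replaceProperties() diff it against the current triples.
  private void replaceTitle(
      final FedoraResource resource,
      final IdentifierConverter<Resource, FedoraResource> translator)
      throws MalformedRdfException {
    final Model inputModel = createDefaultModel();
    inputModel.read(
        new StringReader("<> <http://purl.org/dc/elements/1.1/title> \"Replaced title\" ."),
        resource.toString(),
        "TURTLE");
    resource.replaceProperties(
        translator,
        inputModel,
        resource.getTriples(translator, ImmutableList.of(PropertiesRdfContext.class)));
  }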
Example #6
  /**
   * Add additional models to the RDF dataset for the given resource
   *
   * @param rdfStream the source stream we'll add named models to
   * @param resource the FedoraResourceImpl in question
   * @param uriInfo a JAX-RS UriInfo object to build URIs to resources
   * @param idTranslator the id translator
   */
  public void addHttpComponentModelsForResourceToStream(
      final RdfStream rdfStream,
      final FedoraResource resource,
      final UriInfo uriInfo,
      final IdentifierConverter<Resource, FedoraResource> idTranslator) {

    LOGGER.debug("Adding additional HTTP context triples to stream");
    getUriAwareTripleFactories()
        .forEach(
            (bean, factory) -> {
              LOGGER.debug("Adding response information using: {}", bean);
              final Model m = factory.createModelForResource(resource, uriInfo, idTranslator);
              rdfStream.concat(fromModel(m));
            });
  }
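To show what the factories iterated above might look like, here is a hedged sketch of one; the UriAwareResourceModelFactory interface name, the registration mechanism, and the example predicate URI are assumptions, and only the createModelForResource signature is taken from the loop above.

  // Hedged sketch of a hypothetical factory contributing one extra triple per resource.
  public class ExampleUriAwareFactory implements UriAwareResourceModelFactory {
    @Override
    public Model createModelForResource(
        final FedoraResource resource,
        final UriInfo uriInfo,
        final IdentifierConverter<Resource, FedoraResource> idTranslator) {
      final Model model = createDefaultModel();
      model.add(
          idTranslator.reverse().convert(resource),
          model.createProperty("http://example.org/ns#seeAlso"), // illustrative predicate
          model.createResource(uriInfo.getBaseUriBuilder().path("extras").build().toString()));
      return model;
    }
  }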
Example #7
  @Test
  public void testFiltering() {
    assertEquals("Didn't get unmanaged triple!", unManagedTriple, testStream.next());
    assertFalse("Failed to filter managed triple!", testStream.hasNext());
  }
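Below is a hedged sketch of one way such a filtered stream could be assembled; it is not necessarily how the original suite builds testStream, and `managedTriple` plus the forArray/filter helpers are assumptions, with the predicate mirroring the tripleFilter in the final example.

  // Hedged sketch (not from the original suite): keep only unmanaged triples;
  // `managedTriple` is a hypothetical second fixture triple.
  final RdfStream filtered = new RdfStream();
  filtered.concat(
      filter(forArray(unManagedTriple, managedTriple), IS_MANAGED_TRIPLE.negate()::test));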
Example #8
  /**
   * This method returns a stream of RDF triples associated with this target resource
   *
   * @param limit the number of child resources to include in the response, or -1 for all
   * @return {@link RdfStream}
   */
  protected RdfStream getResourceTriples(final int limit) {
    // use the thing described, not the description, for the subject of descriptive triples
    if (resource() instanceof NonRdfSourceDescription) {
      resource = ((NonRdfSourceDescription) resource()).getDescribedResource();
    }
    final PreferTag returnPreference;

    if (prefer != null && prefer.hasReturn()) {
      returnPreference = prefer.getReturn();
    } else if (prefer != null && prefer.hasHandling()) {
      returnPreference = prefer.getHandling();
    } else {
      returnPreference = PreferTag.emptyTag();
    }

    final LdpPreferTag ldpPreferences = new LdpPreferTag(returnPreference);

    final RdfStream rdfStream = new RdfStream();

    final Predicate<Triple> tripleFilter =
        ldpPreferences.prefersServerManaged() ? x -> true : IS_MANAGED_TRIPLE.negate();

    if (ldpPreferences.prefersServerManaged()) {
      rdfStream.concat(getTriples(LdpRdfContext.class));
    }

    rdfStream.concat(filter(getTriples(TypeRdfContext.class), tripleFilter::test));

    rdfStream.concat(filter(getTriples(PropertiesRdfContext.class), tripleFilter::test));

    if (!returnPreference.getValue().equals("minimal")) {

      // Additional server-managed triples about this resource
      if (ldpPreferences.prefersServerManaged()) {
        rdfStream.concat(getTriples(AclRdfContext.class));
        rdfStream.concat(getTriples(RootRdfContext.class));
        rdfStream.concat(getTriples(ContentRdfContext.class));
        rdfStream.concat(getTriples(ParentRdfContext.class));
      }

      // containment triples about this resource
      if (ldpPreferences.prefersContainment()) {
        rdfStream.concat(getTriples(ChildrenRdfContext.class).limit(limit));
      }

      // LDP container membership triples for this resource
      if (ldpPreferences.prefersMembership()) {
        rdfStream.concat(getTriples(LdpContainerRdfContext.class));
        rdfStream.concat(getTriples(LdpIsMemberOfRdfContext.class));
      }

      // Embed all hash and blank nodes
      // using IS_MANAGED_TRIPLE directly to avoid Prefer header logic (we never want them for hash
      // fragments)
      rdfStream.concat(filter(getTriples(HashRdfContext.class), IS_MANAGED_TRIPLE.negate()::test));
      rdfStream.concat(filter(getTriples(SkolemNodeRdfContext.class), tripleFilter::test));

      // Include inbound references to this object
      if (ldpPreferences.prefersReferences()) {
        rdfStream.concat(getTriples(ReferencesRdfContext.class));
      }

      // Embed the children of this object
      if (ldpPreferences.prefersEmbed()) {

        final Iterator<FedoraResource> children = resource().getChildren();

        rdfStream.concat(
            filter(
                concat(
                    transform(
                        children,
                        child ->
                            child.getTriples(
                                translator(),
                                ImmutableList.of(
                                    TypeRdfContext.class,
                                    PropertiesRdfContext.class,
                                    SkolemNodeRdfContext.class)))),
                tripleFilter::test));
      }
    }

    if (httpTripleUtil != null && ldpPreferences.prefersServerManaged()) {
      httpTripleUtil.addHttpComponentModelsForResourceToStream(
          rdfStream, resource(), uriInfo, translator());
    }

    return rdfStream;
  }
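Finally, a hedged caller sketch echoing the non-binary branch of getContent above; the method name is an assumption, and -1 requests all containment triples, per the javadoc.

  // Hedged caller sketch (not from the original class): stream a full description of the
  // current resource with no limit on containment triples, as getContent() does above.
  protected Response describeResource() {
    final RdfStream description = getResourceTriples(-1);
    if (prefer != null) {
      prefer.getReturn().addResponseHeaders(servletResponse);
    }
    return ok(description).build();
  }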