/**
 * Sets the {@code Cache-Control} and {@code Last-Modified} HTTP response headers from data in
 * the RDF graph. Does nothing when the request addresses a transaction, or when the graph
 * carries no usable last-modified literal.
 *
 * @param httpHeaders the response header map to populate
 * @param rdf the dataset searched for a last-modified triple on its subject node
 * @param uriInfo the request URI, inspected to detect transaction-scoped ("tx:") requests
 */
  public static void setCachingHeaders(
      final MultivaluedMap<String, Object> httpHeaders, final Dataset rdf, final UriInfo uriInfo) {

    final List<PathSegment> pathSegments = uriInfo.getPathSegments();
    final boolean inTransaction =
        !pathSegments.isEmpty() && pathSegments.get(0).getPath().startsWith("tx:");
    if (inTransaction) {
      // Caching headers are meaningless inside a transaction, so skip them entirely.
      return;
    }

    httpHeaders.put(CACHE_CONTROL, of((Object) "max-age=0", (Object) "must-revalidate"));

    LOGGER.trace(
        "Attempting to discover the last-modified date of the node for the resource in question...");
    final Iterator<Quad> lastModifiedQuads =
        rdf.asDatasetGraph().find(ANY, getDatasetSubject(rdf), lastModifiedPredicate, ANY);

    if (!lastModifiedQuads.hasNext()) {
      // No last-modified triple present; leave the Last-Modified header unset.
      return;
    }

    final Object literalValue = lastModifiedQuads.next().getObject().getLiteralValue();

    if (!(literalValue instanceof XSDDateTime)) {
      LOGGER.debug("Found last-modified date, but it was not an XSDDateTime: {}", literalValue);

      return;
    }

    final XSDDateTime lastModified = (XSDDateTime) literalValue;
    LOGGER.debug("Found last-modified date: {}", lastModified);
    // Convert the XSD datetime to the RFC 2822 format HTTP requires for Last-Modified.
    final String lastModifiedRfc2822 =
        RFC2822DATEFORMAT.print(new DateTime(lastModified.asCalendar()));
    httpHeaders.put(LAST_MODIFIED, of((Object) lastModifiedRfc2822));
  }
 /**
  * Appends this handler's stored cookies to the outgoing request's Cookie header values,
  * preserving any cookies already set on the request, then delegates to the next handler.
  */
 @Override
 public ClientResponse handle(ClientRequest clientRequest) throws ClientHandlerException {
   final MultivaluedMap<String, Object> requestHeaders = clientRequest.getHeaders();
   final List<Object> merged = new ArrayList<Object>();
   final List<Object> existing = requestHeaders.get(COOKIE_HEADER);
   if (existing != null) {
     merged.addAll(existing);
   }
   merged.addAll(cookies);
   requestHeaders.put(COOKIE_HEADER, merged);
   return getNext().handle(clientRequest);
 }
// ===== Example #3 =====
  /**
   * Verifies that when no query params are supplied at all ({@code Optional.empty()}), the
   * processor adds no included resources.
   */
  @Test
  public void testNoQueryParams() throws Exception {
    JsonApiDocument jsonApiDocument = new JsonApiDocument();

    // Intentionally pass Optional.empty(): this test covers the absent-params path.
    // (A previously-built queryParams map was dead code here and has been removed.)
    includedProcessor.execute(jsonApiDocument, parentRecord1, Optional.empty());

    Assert.assertNull(
        jsonApiDocument.getIncluded(),
        "Included Processor adds no resources when not given query params");
  }
// ===== Example #4 =====
  /**
   * Verifies that query params lacking an 'include' key cause the processor to add no
   * included resources.
   */
  @Test
  public void testNoQueryIncludeParams() throws Exception {
    JsonApiDocument document = new JsonApiDocument();

    // Params are present but contain no 'include' entry.
    MultivaluedMap<String, String> params = new MultivaluedHashMap<>();
    params.put("unused", Collections.emptyList());
    includedProcessor.execute(document, parentRecord1, Optional.of(params));

    Assert.assertNull(
        document.getIncluded(),
        "Included Processor adds no resources when not given query params");
  }
// ===== Example #5 =====
 /**
  * Copies every header field from the transport response onto the Jersey client response,
  * preserving multiple values per header name.
  *
  * @param respHeaders header fields received from the Jetty transport
  * @param jerseyResponse Jersey response whose header map is populated
  */
 private static void processResponseHeaders(
     final HttpFields respHeaders, final ClientResponse jerseyResponse) {
   // Hoist the header-map lookup out of the loop; it is invariant across iterations.
   final MultivaluedMap<String, String> headers = jerseyResponse.getHeaders();
   for (final HttpField header : respHeaders) {
     // MultivaluedMap.add appends to the key's value list, creating the list if absent —
     // exactly the get/new ArrayList/add/put sequence this replaced.
     headers.add(header.getName(), header.getValue());
   }
 }
// ===== Example #6 =====
  /**
   * Extracts any query params whose key starts with 'filter'.
   *
   * @param queryParams the complete set of request query parameters
   * @return a new multivalued map containing only the entries whose key starts with "filter";
   *     value lists are shared with the input map, not copied
   */
  private static MultivaluedMap<String, String> getFilterParams(
      MultivaluedMap<String, String> queryParams) {
    MultivaluedMap<String, String> returnMap = new MultivaluedHashMap<>();

    // Map.forEach instead of a stream terminated by a side-effecting forEach collector —
    // same traversal, no intermediate stream machinery.
    queryParams.forEach(
        (key, values) -> {
          if (key.startsWith("filter")) {
            returnMap.put(key, values);
          }
        });
    return returnMap;
  }
// ===== Example #7 =====
  /**
   * Builds the fixture: a CSW GetRecords subscription request plus a fully mocked
   * transformer/web-client/security stack, then constructs the SendEventExtension under test.
   */
  @Before
  public void setUp() throws Exception {
    // Point ddf.home at the working directory so configuration lookups resolve in tests.
    System.setProperty("ddf.home", ".");
    callbackURI = new URL("https://localhost:12345/services/csw/subscription/event");
    ObjectFactory objectFactory = new ObjectFactory();
    // Build a GetRecords request asking for BRIEF result sets delivered to the callback URI.
    request = new GetRecordsType();
    request.setOutputSchema(CswConstants.CSW_OUTPUT_SCHEMA);
    request.setResultType(ResultType.RESULTS);
    request.getResponseHandler().add(callbackURI.toString());
    queryType = new QueryType();
    elementSetNameType = new ElementSetNameType();
    elementSetNameType.setValue(ElementSetType.BRIEF);
    queryType.setElementSetName(elementSetNameType);
    request.setAbstractQuery(objectFactory.createAbstractQuery(queryType));
    // Mock the transformer chain: any lookup for the CSW output schema yields a transformer
    // whose transform() always produces a fixed byte payload.
    transformerManager = mock(TransformerManager.class);
    transformer = mock(QueryResponseTransformer.class);
    binaryContent = mock(BinaryContent.class);
    when(transformerManager.getTransformerBySchema(
            Matchers.contains(CswConstants.CSW_OUTPUT_SCHEMA)))
        .thenReturn(transformer);
    when(transformer.transform(any(SourceResponse.class), anyMap())).thenReturn(binaryContent);
    when(binaryContent.getByteArray()).thenReturn("byte array with message contents".getBytes());
    query = mock(QueryRequest.class);

    metacard = mock(Metacard.class);
    webclient = mock(WebClient.class);
    mockCxfClientFactory = mock(SecureCxfClientFactory.class);
    response = mock(Response.class);
    subject = mock(Subject.class);

    mockSecurity = mock(Security.class);
    // Expose the mocked Subject through the response headers, keyed by its class name.
    headers.put(Subject.class.toString(), Arrays.asList(new Subject[] {subject}));
    AccessPlugin accessPlugin = mock(AccessPlugin.class);
    accessPlugins.add(accessPlugin);
    when(mockCxfClientFactory.getWebClient()).thenReturn(webclient);
    //        when(webclient.head()).thenReturn(response);
    when(webclient.invoke(anyString(), any(QueryResponse.class))).thenReturn(response);
    when(response.getHeaders()).thenReturn(headers);
    // The access plugin is a pass-through: it returns whatever QueryResponse it receives.
    when(accessPlugin.processPostQuery(any(QueryResponse.class)))
        .thenAnswer(
            new Answer<QueryResponse>() {
              @Override
              public QueryResponse answer(InvocationOnMock invocationOnMock) throws Throwable {
                return (QueryResponse) invocationOnMock.getArguments()[0];
              }
            });

    sendEvent = new SendEventExtension(transformerManager, request, query, mockCxfClientFactory);
    sendEvent.setSubject(subject);
  }
// ===== Example #8 =====
  /**
   * Verifies that a single 'include' relation pulls exactly that one related resource into
   * the document's included section.
   */
  @Test
  public void testExecuteSingleRelation() throws Exception {
    JsonApiDocument document = new JsonApiDocument();

    MultivaluedMap<String, String> params = new MultivaluedHashMap<>();
    params.put("include", Collections.singletonList("children"));
    includedProcessor.execute(document, parentRecord1, Optional.of(params));

    Assert.assertEquals(
        document.getIncluded(),
        Collections.singletonList(childRecord1.toResource()),
        "Included Processor added single requested resource from 'include' query param");
  }
// ===== Example #9 =====
  /**
   * Verifies that the nested path 'children.friends' includes both the children and their
   * friends; compared as sets because inclusion order is not asserted.
   */
  @Test
  public void testExecuteMultipleNestedRelations() throws Exception {
    JsonApiDocument document = new JsonApiDocument();

    MultivaluedMap<String, String> params = new MultivaluedHashMap<>();
    params.put("include", Collections.singletonList("children.friends"));
    includedProcessor.execute(document, parentRecord3, Optional.of(params));

    Set<Resource> actualIncluded = new HashSet<>(document.getIncluded());
    Set<Resource> expectedIncluded =
        Sets.newHashSet(
            childRecord1.toResource(),
            childRecord2.toResource(),
            childRecord3.toResource(),
            childRecord4.toResource());

    Assert.assertEquals(
        actualIncluded,
        expectedIncluded,
        "Included Processor added multiple nested requested resource collections from 'include' query param");
  }
// ===== Example #10 =====
  /**
   * Verifies that executing over a collection of parent records includes the requested
   * 'children' relation from every record in the collection.
   */
  @Test
  public void testExecuteSingleRelationOnCollection() throws Exception {
    JsonApiDocument document = new JsonApiDocument();

    Set<PersistentResource> parents =
        new HashSet<>(Arrays.asList(parentRecord1, parentRecord2));

    MultivaluedMap<String, String> params = new MultivaluedHashMap<>();
    params.put("include", Collections.singletonList("children"));
    includedProcessor.execute(document, parents, Optional.of(params));

    Assert.assertEquals(
        document.getIncluded(),
        Arrays.asList(childRecord1.toResource(), childRecord2.toResource()),
        "Included Processor added requested resource from all records");
  }
// ===== Example #11 =====
  /**
   * Retrieves, for each dimension of the collection, the values whose summed contribution to at
   * least one metric over the [baseline, current] window meets {@code contributionThreshold}
   * (as a fraction of that metric's total), keeping at most {@code dimensionValuesLimit} values
   * per metric. Already selected dimensions are ignored.
   *
   * @param collection name of the collection to query
   * @param baselineMillis window start, epoch millis
   * @param currentMillis window end, epoch millis
   * @param contributionThreshold minimum fraction of a metric's total a value must contribute
   * @param dimensionValuesLimit maximum number of values retained per metric
   * @return map from dimension name to its ordered, de-duplicated top values
   * @throws Exception on query generation, execution, or result-parsing failure
   */
  private Map<String, Collection<String>> retrieveDimensionValues(
      String collection,
      long baselineMillis,
      long currentMillis,
      double contributionThreshold,
      int dimensionValuesLimit)
      throws Exception {
    List<String> dimensions = getAllDimensions(collection);
    DateTime baseline = new DateTime(baselineMillis);
    DateTime current = new DateTime(currentMillis);

    List<String> metrics = getMetrics(collection);
    String dummyFunction =
        String.format(
            DIMENSION_VALUES_OPTIONS_METRIC_FUNCTION, METRIC_FUNCTION_JOINER.join(metrics));

    MultivaluedMap<String, String> dimensionValues = new MultivaluedMapImpl();
    Map<String, Future<QueryResult>> resultFutures = new HashMap<>();
    // query w/ group by for each dimension.
    for (String dimension : dimensions) {
      // Generate SQL — "!" appears to be a marker value SqlUtils turns into a GROUP BY on this
      // dimension; TODO confirm against SqlUtils.getSql.
      dimensionValues.put(dimension, Arrays.asList("!"));
      String sql =
          SqlUtils.getSql(dummyFunction, collection, baseline, current, dimensionValues, null);
      LOGGER.info("Generated SQL for dimension retrieval {}: {}", serverUri, sql);
      dimensionValues.remove(dimension);

      // Query (in parallel)
      resultFutures.put(dimension, queryCache.getQueryResultAsync(serverUri, sql));
    }

    Map<String, Collection<String>> collectedDimensionValues = new HashMap<>();
    // Wait for all queries and generate the ordered list from the result.
    for (int i = 0; i < dimensions.size(); i++) {
      String dimension = dimensions.get(i);
      // Blocks until this dimension's async query completes.
      QueryResult queryResult = resultFutures.get(dimension).get();

      // Sum up hourly data over entire dataset for each dimension combination
      int metricCount = metrics.size();
      double[] total = new double[metricCount];
      Map<String, double[]> summedValues = new HashMap<>();

      for (Map.Entry<String, Map<String, Number[]>> entry : queryResult.getData().entrySet()) {
        double[] sum = new double[metricCount];
        for (Map.Entry<String, Number[]> hourlyEntry : entry.getValue().entrySet()) {
          for (int j = 0; j < metricCount; j++) {
            double value = hourlyEntry.getValue()[j].doubleValue();
            sum[j] += value;
          }
        }
        summedValues.put(entry.getKey(), sum);
        // update total w/ sums for each dimension value.
        for (int j = 0; j < metricCount; j++) {
          total[j] += sum[j];
        }
      }

      // compare by value ascending (want poll to remove smallest element)
      List<PriorityQueue<Pair<String, Double>>> topNValuesByMetric =
          new ArrayList<PriorityQueue<Pair<String, Double>>>(metricCount);
      double[] threshold = new double[metricCount];
      Comparator<Pair<String, Double>> valueComparator =
          new Comparator<Pair<String, Double>>() {
            @Override
            public int compare(Pair<String, Double> a, Pair<String, Double> b) {
              return Double.compare(a.getValue().doubleValue(), b.getValue().doubleValue());
            }
          };
      // One bounded min-heap per metric; the per-metric threshold is a fraction of its total.
      for (int j = 0; j < metricCount; j++) {
        threshold[j] = total[j] * contributionThreshold;
        topNValuesByMetric.add(new PriorityQueue<>(dimensionValuesLimit, valueComparator));
      }

      // For each dimension value, add it only if it meets the threshold and drop an element from
      // the priority queue if over the limit.
      for (Map.Entry<String, double[]> entry : summedValues.entrySet()) {
        // Result keys are JSON-encoded lists of dimension values; index i picks this dimension.
        List<String> combination = objectMapper.readValue(entry.getKey(), LIST_TYPE_REF);
        String dimensionValue = combination.get(i);
        for (int j = 0; j < metricCount; j++) { // metricCount == entry.getValue().length
          double dimensionValueContribution = entry.getValue()[j];
          if (dimensionValueContribution >= threshold[j]) {
            PriorityQueue<Pair<String, Double>> topNValues = topNValuesByMetric.get(j);
            topNValues.add(new Pair<>(dimensionValue, dimensionValueContribution));
            if (topNValues.size() > dimensionValuesLimit) {
              // Evict the smallest contributor to keep only the top N.
              topNValues.poll();
            }
          }
        }
      }

      // Poll returns the elements in order of ascending contribution, so poll and reverse the
      // order.

      // not LinkedHashSet because we need to reverse insertion order with metrics.
      List<String> sortedValues = new ArrayList<>();
      HashSet<String> sortedValuesSet = new HashSet<>();

      for (int j = 0; j < metricCount; j++) {
        PriorityQueue<Pair<String, Double>> topNValues = topNValuesByMetric.get(j);
        int startIndex = sortedValues.size();
        while (!topNValues.isEmpty()) {
          Pair<String, Double> pair = topNValues.poll();
          String dimensionValue = pair.getKey();
          if (!sortedValuesSet.contains(dimensionValue)) {
            // Inserting at startIndex reverses the ascending poll order within this
            // metric's section, yielding descending contribution per metric.
            sortedValues.add(startIndex, dimensionValue);
            sortedValuesSet.add(dimensionValue);
          }
        }
      }

      collectedDimensionValues.put(dimension, sortedValues);
    }
    return collectedDimensionValues;
  }
 /**
  * Associates {@code value} with {@code key} in the backing delegate map, replacing any
  * existing list wholesale (no merging of values).
  *
  * @param key map key
  * @param value complete list of values to store under the key
  * @return the list previously mapped to {@code key}, or {@code null} if none
  */
 @Override
 public List<V> put(final String key, final List<V> value) {
   return delegate.put(key, value);
 }
 /**
  * Replaces the value list mapped to {@code k} with {@code vs} in the backing delegate map.
  *
  * @param k map key
  * @param vs list of values to store
  * @return the previously mapped list, or {@code null} if none
  */
 public List<V> put(K k, List<V> vs) {
   return delegate.put(k, vs);
 }