Example #1
  public JahiaEquiJoinMerger(
      Join join,
      Map<String, PropertyValue> columns,
      OperandEvaluator evaluator,
      QueryObjectModelFactory factory,
      EquiJoinCondition condition)
      throws RepositoryException {
    super(join, columns, evaluator, factory, condition);

    // Resolve the two property values referenced by the equi-join condition.
    PropertyValue property1 =
        factory.propertyValue(condition.getSelector1Name(), condition.getProperty1Name());
    PropertyValue property2 =
        factory.propertyValue(condition.getSelector2Name(), condition.getProperty2Name());

    // Orient the pair so that leftProperty/rightProperty always match the left and right
    // selectors of the join, regardless of the order used in the condition.
    if (leftSelectors.contains(property1.getSelectorName())
        && rightSelectors.contains(property2.getSelectorName())) {
      leftProperty = property1;
      rightProperty = property2;
    } else if (leftSelectors.contains(property2.getSelectorName())
        && rightSelectors.contains(property1.getSelectorName())) {
      leftProperty = property2;
      rightProperty = property1;
    } else {
      throw new RepositoryException("Invalid equi-join");
    }
  }
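
For orientation, a minimal sketch (not taken from the original project) of how an EquiJoinCondition like the one consumed by this constructor is typically built with the standard JCR QueryObjectModelFactory API; the node types, selector names, and the "fileId" property are illustrative assumptions, and the usual javax.jcr and javax.jcr.query.qom imports are implied:

  // Sketch only: node types, selector names, and "fileId" are illustrative assumptions.
  private Join buildEquiJoin(Session session) throws RepositoryException {
    QueryObjectModelFactory factory = session.getWorkspace().getQueryManager().getQOMFactory();
    EquiJoinCondition condition =
        factory.equiJoinCondition("file", "jcr:uuid", "meta", "fileId");
    // The condition's selector/property pairs are what the constructor above re-resolves
    // into leftProperty and rightProperty.
    return factory.join(
        factory.selector("nt:file", "file"),
        factory.selector("nt:unstructured", "meta"),
        QueryObjectModelConstants.JCR_JOIN_TYPE_INNER,
        condition);
  }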
Example #2
  private Comparison stringComparisonConstraint(
      DynamicOperand operand,
      String valueOperandShouldBe,
      QueryObjectModelFactory qomFactory,
      ValueFactory valueFactory)
      throws RepositoryException {
    return qomFactory.comparison(
        operand,
        QueryObjectModelFactory.JCR_OPERATOR_EQUAL_TO,
        qomFactory.literal(valueFactory.createValue(valueOperandShouldBe, PropertyType.STRING)));
  }
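
A hedged usage sketch for the helper above: building a constraint that matches nodes whose jcr:language property equals a given locale. The selector name "node" and the value "en" are assumptions for illustration; qomFactory and valueFactory would be obtained from the current session as in the other examples:

  // Sketch only: "node" and "en" are illustrative; qomFactory and valueFactory come from
  // session.getWorkspace().getQueryManager().getQOMFactory() and session.getValueFactory().
  Constraint languageConstraint =
      stringComparisonConstraint(
          qomFactory.propertyValue("node", "jcr:language"),
          "en",
          qomFactory,
          valueFactory);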
Example #3
  /** {@inheritDoc} */
  public List<RepositoryFile> getDeletedFiles(
      final Session session,
      final PentahoJcrConstants pentahoJcrConstants,
      final String origParentFolderPath,
      final String filter)
      throws RepositoryException {
    Node trashNode = getOrCreateTrashInternalFolderNode(session, pentahoJcrConstants);

    // query Trash Structure 2
    QueryObjectModelFactory fac = session.getWorkspace().getQueryManager().getQOMFactory();
    final String selectorName = "selector"; // $NON-NLS-1$

    // selector
    final Selector selector = fac.selector("nt:base", selectorName); // $NON-NLS-1$
    // constraint1
    Constraint origParentFolderPathConstraint =
        fac.comparison(
            fac.propertyValue(selectorName, pentahoJcrConstants.getPHO_ORIGPARENTFOLDERPATH()),
            QueryObjectModelConstants.JCR_OPERATOR_EQUAL_TO,
            fac.literal(session.getValueFactory().createValue(origParentFolderPath)));
    // constraint2
    Constraint origNameConstraint = null;
    if (StringUtils.hasLength(filter)) {
      String convertedFilter = filter.replace('*', '%');
      origNameConstraint =
          fac.comparison(
              fac.propertyValue(selectorName, pentahoJcrConstants.getPHO_ORIGNAME()),
              QueryObjectModelConstants.JCR_OPERATOR_LIKE,
              fac.literal(session.getValueFactory().createValue(convertedFilter)));
    }
    // constraint3
    Constraint descendantNodeConstraint = fac.descendantNode(selectorName, trashNode.getPath());
    // AND together constraints
    Constraint allConstraints = fac.and(descendantNodeConstraint, origParentFolderPathConstraint);
    if (StringUtils.hasLength(filter)) {
      allConstraints = fac.and(allConstraints, origNameConstraint);
    }
    Query query = fac.createQuery(selector, allConstraints, null, null);
    QueryResult result =
        session
            .getWorkspace()
            .getQueryManager()
            .createQuery(query.getStatement(), Query.JCR_JQOM)
            .execute();

    NodeIterator nodeIter = result.getNodes();
    List<RepositoryFile> deletedFiles = new ArrayList<RepositoryFile>();

    while (nodeIter.hasNext()) {
      Node trashFileIdNode = nodeIter.nextNode();
      if (trashFileIdNode.hasNodes()) {
        // since the nodes returned by the query are the trash file ID nodes, call
        // getNodes().nextNode() to get the first (and only) child
        deletedFiles.add(
            nodeToDeletedFile(session, pentahoJcrConstants, trashFileIdNode.getNodes().nextNode()));
      } else {
        throw new RuntimeException(
            Messages.getInstance()
                .getString("DefaultDeleteHelper.ERROR_0002_NOT_CLEAN")); // $NON-NLS-1$
      }
    }

    // now handle legacy trash, since legacy trashed files don't have the
    // origParentFolderPath property

    Set<RepositoryFile> mergedDeletedFiles = new HashSet<RepositoryFile>();
    mergedDeletedFiles.addAll(deletedFiles);
    mergedDeletedFiles.addAll(
        legacyGetDeletedFiles(
            session,
            pentahoJcrConstants,
            pathConversionHelper.relToAbs(origParentFolderPath),
            filter));

    List<RepositoryFile> mergedList = new ArrayList<RepositoryFile>(mergedDeletedFiles);
    Collections.sort(mergedList);
    return mergedList;
  }
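
If the deleted files had to come back already ordered by the repository, an Ordering array could be passed instead of null as the third argument to createQuery. A minimal sketch referring to the variables of the method above, assuming a hypothetical "pho:deletedDate" property name (the real name would come from pentahoJcrConstants):

    // Sketch only: "pho:deletedDate" is a hypothetical property name used for illustration.
    Ordering byDeletedDateDesc =
        fac.descending(fac.propertyValue(selectorName, "pho:deletedDate"));
    Query orderedQuery =
        fac.createQuery(selector, allConstraints, new Ordering[] {byDeletedDateDesc}, null);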
Example #4
  @GET
  @Path("/{type}")
  @Produces({Utils.MEDIA_TYPE_APPLICATION_HAL_PLUS_JSON, MediaType.APPLICATION_JSON})
  public Object getByType(
      @PathParam("workspace") String workspace,
      @PathParam("language") String language,
      @PathParam("type") String type,
      @QueryParam("nameContains") List<String> nameConstraints,
      @QueryParam("orderBy") String orderBy,
      @QueryParam("limit") int limit,
      @QueryParam("offset") int offset,
      @QueryParam("depth") int depth,
      @Context UriInfo context) {

    if (API.isQueryDisabled()) {
      APIExceptionMapper.LOGGER.debug("Types endpoint is disabled. Attempted query on " + type);
      return Response.status(Response.Status.NOT_FOUND).build();
    }

    final String unescapedNodetype = Names.unescape(type);
    if (API.excludedNodeTypes.contains(unescapedNodetype)) {
      return Response.status(Response.Status.FORBIDDEN)
          .entity("'" + unescapedNodetype + "' is not available for querying.")
          .build();
    }

    Session session = null;

    try {

      session = getSession(workspace, language);
      final QueryObjectModelFactory qomFactory =
          session.getWorkspace().getQueryManager().getQOMFactory();
      final ValueFactory valueFactory = session.getValueFactory();
      final Selector selector = qomFactory.selector(unescapedNodetype, SELECTOR_NAME);

      // language constraint: either jcr:language doesn't exist or jcr:language is current language
      Constraint constraint =
          qomFactory.or(
              qomFactory.not(qomFactory.propertyExistence(SELECTOR_NAME, Constants.JCR_LANGUAGE)),
              stringComparisonConstraint(
                  qomFactory.propertyValue(SELECTOR_NAME, Constants.JCR_LANGUAGE),
                  language,
                  qomFactory,
                  valueFactory));

      // if "nameContains" query parameters were passed, only return nodes whose name contains
      // the specified terms
      if (nameConstraints != null && !nameConstraints.isEmpty()) {
        for (String name : nameConstraints) {
          final Comparison likeConstraint =
              qomFactory.comparison(
                  qomFactory.nodeLocalName(SELECTOR_NAME),
                  QueryObjectModelFactory.JCR_OPERATOR_LIKE,
                  qomFactory.literal(
                      valueFactory.createValue("%" + name + "%", PropertyType.STRING)));
          constraint = qomFactory.and(constraint, likeConstraint);
        }
      }

      Ordering[] orderings = null;
      // ordering deactivated because it currently doesn't work, probably due to a bug in
      // QueryServiceImpl
      if (Utils.exists(orderBy)) {
        if ("desc".equalsIgnoreCase(orderBy)) {
          orderings =
              new Ordering[] {qomFactory.descending(qomFactory.nodeLocalName(SELECTOR_NAME))};
        } else {
          orderings =
              new Ordering[] {qomFactory.ascending(qomFactory.nodeLocalName(SELECTOR_NAME))};
        }
      }

      final QueryObjectModel query =
          qomFactory.createQuery(
              selector,
              constraint,
              orderings,
              new Column[] {qomFactory.column(SELECTOR_NAME, null, null)});
      if (limit > 0) {
        query.setLimit(limit);
      }
      query.setOffset(offset);

      final QueryResult queryResult = query.execute();

      final NodeIterator nodes = queryResult.getNodes();
      final List<JSONNode> result = new LinkedList<JSONNode>();
      final Filter filter = Utils.getFilter(context);
      while (nodes.hasNext()) {
        final Node resultNode = nodes.nextNode();
        if (filter.acceptChild(resultNode)) {
          JSONNode node = getFactory().createNode(resultNode, filter, depth);
          result.add(node);
        }
      }

      return Response.ok(result).build();
    } catch (Exception e) {
      throw new APIException(e);
    } finally {
      closeSession(session);
    }
  }
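
For completeness, a hedged sketch of calling this endpoint with a standard JAX-RS client. The base URI and the "types" parent segment are assumptions about how the enclosing resource class is mounted, and "jnt:news" is an arbitrary node type; only the query parameters mirror the @QueryParam arguments of getByType:

  // Sketch only: the base URI and the "types" segment are deployment assumptions.
  Client client = ClientBuilder.newClient();
  String json =
      client
          .target("http://localhost:8080/api")    // placeholder base URI
          .path("default")                         // {workspace}
          .path("en")                              // {language}
          .path("types")                           // assumed parent segment
          .path("jnt:news")                        // {type}
          .queryParam("nameContains", "holiday")
          .queryParam("limit", 10)
          .queryParam("offset", 0)
          .request(MediaType.APPLICATION_JSON)
          .get(String.class);
  client.close();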