/** Tests for various MP3 specific bits of metadata */
public void testFileSpecificMetadata(String mimetype, Map<QName, Serializable> properties) {
  QName songTitle = QName.createQName("music", "songTitle");
  assertEquals(
      "Property " + songTitle + " not found for mimetype " + mimetype,
      QUICK_TITLE,
      DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(songTitle)));

  QName songArtist = QName.createQName("music", "artist");
  assertEquals(
      "Property " + songArtist + " not found for mimetype " + mimetype,
      ARTIST,
      DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(songArtist)));

  // Description is a composite - check the artist part
  assertContains(
      "Property " + ContentModel.PROP_DESCRIPTION + " didn't contain " + ARTIST
          + " for mimetype " + mimetype,
      ARTIST,
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(ContentModel.PROP_DESCRIPTION)));
}
/**
 * We don't have quite the usual metadata. Tests the description one. Other tests are in
 * {@link #testFileSpecificMetadata(String, Map)}.
 */
protected void testCommonMetadata(String mimetype, Map<QName, Serializable> properties) {
  // Title is as normal
  assertEquals(
      "Property " + ContentModel.PROP_TITLE + " not found for mimetype " + mimetype,
      QUICK_TITLE,
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(ContentModel.PROP_TITLE)));

  // Has Author, not Creator, and is different
  assertEquals(
      "Property " + ContentModel.PROP_AUTHOR + " not found for mimetype " + mimetype,
      "Hauskaz",
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(ContentModel.PROP_AUTHOR)));

  // Description is a composite
  assertContains(
      "Property " + ContentModel.PROP_DESCRIPTION + " didn't contain " + QUICK_TITLE
          + " for mimetype " + mimetype,
      QUICK_TITLE,
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(ContentModel.PROP_DESCRIPTION)));

  // Check rest of it later
}
private String getValueAsString(Serializable value) {
  Object converted =
      DefaultTypeConverter.INSTANCE.convert(
          getServiceRegistry().getDictionaryService().getDataType(DataTypeDefinition.NODE_REF),
          value);
  String asString = DefaultTypeConverter.INSTANCE.convert(String.class, converted);
  return asString;
}
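/*
 * DefaultTypeConverter.INSTANCE is the conversion workhorse throughout these snippets. A minimal,
 * self-contained sketch of the Class-based conversion used above, assuming only the Alfresco
 * data-model classes on the classpath; the NodeRef value is illustrative, not from the original
 * source.
 */
import java.io.Serializable;
import java.util.Date;

import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;

public class TypeConverterSketch {
  public static void main(String[] args) {
    // Convert an arbitrary Serializable value to its String form, as getValueAsString() does
    // for its final step.
    Serializable raw = new NodeRef("workspace://SpacesStore/0000-1111");
    String asString = DefaultTypeConverter.INSTANCE.convert(String.class, raw);
    System.out.println(asString); // the node reference in its string form

    // Dates convert to their ISO 8601 string form - the same conversion the exporter and the
    // sent-date assertion elsewhere in this section rely on.
    String date = DefaultTypeConverter.INSTANCE.convert(String.class, new Date());
    System.out.println(date);
  }
}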
/** We have no author, and have the same title and description */
protected void testCommonMetadata(String mimetype, Map<QName, Serializable> properties) {
  assertEquals(
      "Property " + ContentModel.PROP_TITLE + " not found for mimetype " + mimetype,
      QUICK_TITLE,
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(ContentModel.PROP_TITLE)));
  assertEquals(
      "Property " + ContentModel.PROP_DESCRIPTION + " not found for mimetype " + mimetype,
      QUICK_TITLE,
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(ContentModel.PROP_DESCRIPTION)));
}
/** @return Returns true if the pattern is present, otherwise false. */
public boolean like(
    NodeRef nodeRef, QName propertyQName, String sqlLikePattern, boolean includeFTS) {
  if (propertyQName == null) {
    throw new IllegalArgumentException("Property QName is mandatory for the like expression");
  }

  StringBuilder sb = new StringBuilder(sqlLikePattern.length() * 3);

  if (includeFTS) {
    // convert the SQL-like pattern into a Lucene-compatible string
    String pattern =
        SearchLanguageConversion.convertXPathLikeToLucene(sqlLikePattern.toLowerCase());

    // build Lucene search string specific to the node
    sb = new StringBuilder();
    sb.append("+ID:\"").append(nodeRef.toString()).append("\" +(");
    // FTS or attribute matches
    if (includeFTS) {
      sb.append("TEXT:(").append(pattern).append(") ");
    }
    if (propertyQName != null) {
      sb.append(" @")
          .append(
              SearchLanguageConversion.escapeLuceneQuery(
                  QName.createQName(
                          propertyQName.getNamespaceURI(),
                          ISO9075.encode(propertyQName.getLocalName()))
                      .toString()))
          .append(":(")
          .append(pattern)
          .append(")");
    }
    sb.append(")");

    ResultSet resultSet = null;
    try {
      resultSet = this.query(nodeRef.getStoreRef(), "lucene", sb.toString());
      boolean answer = resultSet.length() > 0;
      return answer;
    } finally {
      if (resultSet != null) {
        resultSet.close();
      }
    }
  } else {
    // convert the SQL-like pattern into a regular expression
    String pattern =
        SearchLanguageConversion.convertXPathLikeToRegex(sqlLikePattern.toLowerCase());

    Serializable property = nodeService.getProperty(nodeRef, propertyQName);
    if (property == null) {
      return false;
    } else {
      String propertyString =
          DefaultTypeConverter.INSTANCE.convert(
              String.class, nodeService.getProperty(nodeRef, propertyQName));
      return propertyString.toLowerCase().matches(pattern);
    }
  }
}
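/*
 * Usage sketch for the like() method above, assuming it is exposed through the SearchService
 * interface as shown; the node and the cm:name property are illustrative. The pattern uses SQL
 * LIKE wildcards (% and _) and is converted to a regex or to Lucene syntax depending on the
 * includeFTS flag.
 */
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;

public class LikeUsageSketch {
  public static void check(SearchService searchService, NodeRef nodeRef) {
    QName nameProp = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "name");

    // Metadata-only: the LIKE pattern becomes a regex applied to the property value.
    boolean byProperty = searchService.like(nodeRef, nameProp, "quick%", false);

    // Metadata or full text: the pattern becomes Lucene syntax and the node itself is queried,
    // so TEXT content can also satisfy the pattern.
    boolean byPropertyOrText = searchService.like(nodeRef, nameProp, "quick%", true);

    System.out.println(byProperty + " / " + byPropertyOrText);
  }
}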
/*
 * (non-Javadoc)
 * @see org.alfresco.cmis.property.PropertyLuceneBuilder#buildLuceneIn(org.alfresco.repo.search.impl.lucene.LuceneQueryParser, java.util.Collection, java.lang.Boolean, org.alfresco.repo.search.impl.querymodel.PredicateMode)
 */
public Query buildLuceneIn(
    AbstractLuceneQueryParser lqp, Collection<Serializable> values, Boolean not, PredicateMode mode)
    throws ParseException {
  String field = getLuceneFieldName();

  // Check type conversion
  @SuppressWarnings("unused")
  Object converted =
      DefaultTypeConverter.INSTANCE.convert(
          getServiceRegistry().getDictionaryService().getDataType(DataTypeDefinition.NODE_REF),
          values);
  Collection<String> asStrings = DefaultTypeConverter.INSTANCE.convert(String.class, values);

  if (asStrings.size() == 0) {
    if (not) {
      return new MatchAllDocsQuery();
    } else {
      return new TermQuery(new Term("NO_TOKENS", "__"));
    }
  } else if (asStrings.size() == 1) {
    String value = asStrings.iterator().next();
    if (not) {
      return lqp.getDoesNotMatchFieldQuery(
          field, value, AnalysisMode.IDENTIFIER, LuceneFunction.FIELD);
    } else {
      return lqp.getFieldQuery(field, value, AnalysisMode.IDENTIFIER, LuceneFunction.FIELD);
    }
  } else {
    BooleanQuery booleanQuery = new BooleanQuery();
    if (not) {
      booleanQuery.add(new MatchAllDocsQuery(), Occur.MUST);
    }
    for (String value : asStrings) {
      Query any = lqp.getFieldQuery(field, value, AnalysisMode.IDENTIFIER, LuceneFunction.FIELD);
      if (not) {
        booleanQuery.add(any, Occur.MUST_NOT);
      } else {
        booleanQuery.add(any, Occur.SHOULD);
      }
    }
    return booleanQuery;
  }
}
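/*
 * The multi-value NOT branch above uses a classic Lucene idiom: a BooleanQuery containing only
 * MUST_NOT clauses matches nothing, so a MatchAllDocsQuery MUST clause is added first and each
 * excluded value becomes a MUST_NOT clause. A standalone sketch of that composition against the
 * classic (pre-4.x) Lucene API used here; field and values are illustrative.
 */
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class NotInQuerySketch {
  /** Builds "everything except documents whose field holds one of the given values". */
  public static Query notIn(String field, String... excludedValues) {
    BooleanQuery booleanQuery = new BooleanQuery();
    // The positive clause is required: purely negative boolean queries return no documents.
    booleanQuery.add(new MatchAllDocsQuery(), Occur.MUST);
    for (String value : excludedValues) {
      booleanQuery.add(new TermQuery(new Term(field, value)), Occur.MUST_NOT);
    }
    return booleanQuery;
  }
}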
/** Test our extra IMAP properties */
public void testFileSpecificMetadata(String mimetype, Map<QName, Serializable> properties) {
  // Check the other cm: ones
  assertEquals(
      "Property " + ContentModel.PROP_ORIGINATOR + " not found for mimetype " + mimetype,
      QUICK_CREATOR + " <" + QUICK_CREATOR_EMAIL + ">",
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(ContentModel.PROP_ORIGINATOR)));
  assertEquals(
      "Property " + ContentModel.PROP_SENTDATE + " not found for mimetype " + mimetype,
      "2004-06-04T13:23:22.000+01:00",
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(ContentModel.PROP_SENTDATE)));

  // Check some imap: ones
  assertEquals(
      "Test Property " + MESSAGE_FROM_TEST_PROPERTY + " incorrect for mimetype " + mimetype,
      "Nevin Nollop <*****@*****.**>",
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(MESSAGE_FROM_TEST_PROPERTY)));
  assertEquals(
      "Test Property " + MESSAGE_TO_TEST_PROPERTY + " incorrect for mimetype " + mimetype,
      "Nevin Nollop <*****@*****.**>",
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(MESSAGE_TO_TEST_PROPERTY)));

  // Finally check our non-standard ones we added in at test time
  assertTrue(
      "Test Property " + MESSAGE_FROM_TEST_PROPERTY + " not found for mimetype " + mimetype,
      properties.containsKey(MESSAGE_FROM_TEST_PROPERTY));
  assertTrue(
      "Test Property " + MESSAGE_TO_TEST_PROPERTY + " not found for mimetype " + mimetype,
      properties.containsKey(MESSAGE_TO_TEST_PROPERTY));
  assertTrue(
      "Test Property " + MESSAGE_CC_TEST_PROPERTY + " not found for mimetype " + mimetype,
      properties.containsKey(MESSAGE_CC_TEST_PROPERTY));
  assertEquals(
      "Test Property " + MESSAGE_FROM_TEST_PROPERTY + " incorrect for mimetype " + mimetype,
      "Nevin Nollop <*****@*****.**>",
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(MESSAGE_FROM_TEST_PROPERTY)));
  assertEquals(
      "Test Property " + MESSAGE_TO_TEST_PROPERTY + " incorrect for mimetype " + mimetype,
      "Nevin Nollop <*****@*****.**>",
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(MESSAGE_TO_TEST_PROPERTY)));
  assertEquals(
      "Test Property " + MESSAGE_CC_TEST_PROPERTY + " incorrect for mimetype " + mimetype,
      "Nevin Nollop <*****@*****.**>",
      DefaultTypeConverter.INSTANCE.convert(
          String.class, properties.get(MESSAGE_CC_TEST_PROPERTY)));
}
@Override
protected String finishImpl(FacesContext context, String outcome) throws Exception {
  // Try and extract metadata from the file
  ContentReader cr = new FileContentReader(this.file);
  cr.setMimetype(this.mimeType);
  cr.setEncoding(this.encoding);

  // create properties for content type
  Map<QName, Serializable> contentProps = new HashMap<QName, Serializable>(5, 1.0f);

  if (Repository.extractMetadata(FacesContext.getCurrentInstance(), cr, contentProps)) {
    this.author = (String) (contentProps.get(ContentModel.PROP_AUTHOR));
    this.title =
        DefaultTypeConverter.INSTANCE.convert(
            String.class, contentProps.get(ContentModel.PROP_TITLE));
    this.description =
        DefaultTypeConverter.INSTANCE.convert(
            String.class, contentProps.get(ContentModel.PROP_DESCRIPTION));
  }

  // default the title to the file name if not set
  if (this.title == null) {
    this.title = this.fileName;
  }

  // determine whether inline editing should be enabled by default.
  // if the mime type of the added file is in the list of mime types
  // configured in "Content Wizards" then enable inline editing
  List<String> mimeTypes = getInlineEditableMimeTypes();
  if (mimeTypes.contains(this.mimeType)) {
    this.inlineEdit = true;
  }

  saveContent(this.file, null);

  // return default outcome
  return outcome;
}
@Override
public String getOldPath(String site, String path) {
  PersistenceManagerService persistenceManagerService =
      _servicesManager.getService(PersistenceManagerService.class);
  NodeRef nodeRef =
      persistenceManagerService.getNodeRef(
          SITE_REPO_ROOT_PATTERN.replaceAll(SITE_REPLACEMENT_PATTERN, site), path);
  if (nodeRef != null) {
    if (persistenceManagerService.hasAspect(nodeRef, CStudioContentModel.ASPECT_RENAMED)) {
      String oldPath =
          DefaultTypeConverter.INSTANCE.convert(
              String.class,
              persistenceManagerService.getProperty(
                  nodeRef, CStudioContentModel.PROP_RENAMED_OLD_URL));
      return oldPath;
    }
  }
  return null;
}
@Override
public String getFilename(String site, String path) {
  if (path != null && !path.isEmpty()) {
    PersistenceManagerService persistenceManagerService =
        _servicesManager.getService(PersistenceManagerService.class);
    NodeRef nodeRef =
        persistenceManagerService.getNodeRef(
            SITE_REPO_ROOT_PATTERN.replaceAll(SITE_REPLACEMENT_PATTERN, site), path);
    if (nodeRef != null) {
      return DefaultTypeConverter.INSTANCE.convert(
          String.class, persistenceManagerService.getProperty(nodeRef, ContentModel.PROP_NAME));
    } else {
      int idx = path.lastIndexOf("/");
      if (idx > 0) {
        return path.substring(idx + 1);
      } else {
        return path;
      }
    }
  } else {
    return "";
  }
}
/**
 * Helper method to extract file info from a specific node.
 *
 * <p>This method goes direct to the repo for all information and no data is cached here.
 *
 * @param nodeRef the node
 * @param readOnly should the file be shown as "read only", regardless of its permissions?
 * @param lockedFilesAsOffline should a locked file be marked as offline
 * @return Returns the file information pertinent to the node
 * @throws FileNotFoundException if the path refers to a non-existent file
 */
private ContentFileInfo getFileInformationImpl(
    NodeRef nodeRef, boolean readOnly, boolean lockedFilesAsOffline) throws FileNotFoundException {
  // get the file info
  org.alfresco.service.cmr.model.FileInfo fileFolderInfo = fileFolderService.getFileInfo(nodeRef);

  // retrieve required properties and create new JLAN file info
  ContentFileInfo fileInfo = new ContentFileInfo(nodeRef);

  // Set the file id from the node's DBID
  long id =
      DefaultTypeConverter.INSTANCE.convert(
          Long.class, nodeService.getProperty(nodeRef, ContentModel.PROP_NODE_DBID));
  fileInfo.setFileId((int) (id & 0xFFFFFFFFL));

  // unset all attribute flags
  int fileAttributes = 0;
  fileInfo.setFileAttributes(fileAttributes);

  if (fileFolderInfo.isFolder()) {
    // add directory attribute
    fileAttributes |= FileAttribute.Directory;
    fileInfo.setFileAttributes(fileAttributes);
    fileInfo.setFileType(FileType.Directory);
  } else {
    Map<QName, Serializable> nodeProperties = fileFolderInfo.getProperties();

    // Get the file size from the content
    ContentData contentData = (ContentData) nodeProperties.get(ContentModel.PROP_CONTENT);
    long size = 0L;
    if (contentData != null) {
      size = contentData.getSize();
    }
    fileInfo.setSize(size);

    // Set the allocation size by rounding up the size to a 512 byte block boundary
    if (size > 0) {
      fileInfo.setAllocationSize((size + 512L) & 0xFFFFFFFFFFFFFE00L);
    }

    // Check whether the file is locked
    if (nodeService.hasAspect(nodeRef, ContentModel.ASPECT_LOCKABLE)) {
      LockType lockType = lockService.getLockType(nodeRef);
      int attr = fileInfo.getFileAttributes();

      if (lockType != null) {
        switch (lockType) {
          case NODE_LOCK:
            if ((attr & FileAttribute.ReadOnly) == 0) {
              attr += FileAttribute.ReadOnly;
            }
            break;
          case WRITE_LOCK:
            LockStatus lockStatus = lockService.getLockStatus(nodeRef);
            if (lockStatus != LockStatus.LOCK_OWNER) {
              if ((attr & FileAttribute.ReadOnly) == 0) {
                attr += FileAttribute.ReadOnly;
              }
              if (lockedFilesAsOffline) {
                attr += FileAttribute.NTOffline;
              }
            }
            break;
          case READ_ONLY_LOCK:
            if ((attr & FileAttribute.ReadOnly) == 0) {
              attr += FileAttribute.ReadOnly;
            }
            if (lockedFilesAsOffline) {
              attr += FileAttribute.NTOffline;
            }
            break;
        }
        fileInfo.setFileAttributes(attr);
      }
    }

    // Check if it is a link node
    if (fileFolderInfo.isLink()) {
      fileInfo.setLinkNodeRef(fileFolderInfo.getLinkNodeRef());
    }
  }

  // created
  Date createdDate = fileFolderInfo.getCreatedDate();
  if (createdDate != null) {
    long created = DefaultTypeConverter.INSTANCE.longValue(createdDate);
    fileInfo.setCreationDateTime(created);
  }

  // modified
  Date modifiedDate = fileFolderInfo.getModifiedDate();
  if (modifiedDate != null) {
    long modified = DefaultTypeConverter.INSTANCE.longValue(modifiedDate);
    fileInfo.setModifyDateTime(modified);
    fileInfo.setAccessDateTime(modified);
    fileInfo.setChangeDateTime(modified);
  }

  // name
  String name = fileFolderInfo.getName();
  if (name != null) {
    fileInfo.setFileName(name);

    // Check for file names that should be hidden
    if (hiddenAspect.getVisibility(Client.cifs, fileInfo.getNodeRef())
        == Visibility.HiddenAttribute) {
      // Add the hidden file attribute
      int attr = fileInfo.getFileAttributes();
      if ((attr & FileAttribute.Hidden) == 0) {
        attr += FileAttribute.Hidden;
        fileInfo.setFileAttributes(attr);
      }
    }
  }

  // Read/write access
  if (!fileFolderInfo.isFolder() || isReadOnlyFlagOnFolders) {
    boolean deniedPermission =
        permissionService.hasPermission(nodeRef, PermissionService.WRITE) == AccessStatus.DENIED;
    if (readOnly || deniedPermission) {
      int attr = fileInfo.getFileAttributes();
      if ((attr & FileAttribute.ReadOnly) == 0) {
        attr += FileAttribute.ReadOnly;
        fileInfo.setFileAttributes(attr);
      }
    }
  }

  // Set the normal file attribute if no other attributes are set
  if (fileInfo.getFileAttributes() == 0) {
    fileInfo.setFileAttributes(FileAttribute.NTNormal);
  }

  // Debug
  if (logger.isDebugEnabled()) {
    logger.debug("Fetched file info: \n" + " info: " + fileInfo);
  }

  // Return the file information
  return fileInfo;
}
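/*
 * Two bit-level details in getFileInformationImpl() are easy to misread: the 64-bit node DBID is
 * truncated to its low 32 bits for the JLAN file id, and the allocation size is rounded with
 * (size + 512) & ~511, which always lands on a 512-byte boundary and gives an exact multiple one
 * extra block. A small self-contained check of that arithmetic (plain Java, no Alfresco types).
 */
public class AllocationSizeSketch {
  static long allocationSize(long size) {
    // Same expression as setAllocationSize() above: mask off the low 9 bits after adding 512.
    return (size + 512L) & 0xFFFFFFFFFFFFFE00L;
  }

  public static void main(String[] args) {
    System.out.println(allocationSize(1L));   // 512
    System.out.println(allocationSize(511L)); // 512
    System.out.println(allocationSize(512L)); // 1024 (an exact multiple still gains a block)
    System.out.println(allocationSize(513L)); // 1024

    // File id: only the low 32 bits of the node DBID survive the cast to int.
    long dbid = 0x1_0000_0042L;
    System.out.println((int) (dbid & 0xFFFFFFFFL)); // 66
  }
}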
/* (non-Javadoc)
 * @see org.alfresco.service.cmr.view.Exporter#value(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.namespace.QName, java.io.Serializable)
 */
public void value(NodeRef nodeRef, QName property, Object value, int index) {
  try {
    // determine data type of value
    QName valueDataType = null;
    PropertyDefinition propDef = dictionaryService.getProperty(property);
    DataTypeDefinition dataTypeDef = (propDef == null) ? null : propDef.getDataType();
    if (dataTypeDef == null || dataTypeDef.getName().equals(DataTypeDefinition.ANY)) {
      dataTypeDef = (value == null) ? null : dictionaryService.getDataType(value.getClass());
      if (dataTypeDef != null) {
        valueDataType = dataTypeDef.getName();
      }
    }

    // convert node references to paths
    if (value instanceof NodeRef && referenceType.equals(ReferenceType.PATHREF)) {
      NodeRef valueNodeRef = (NodeRef) value;
      if (nodeRef.getStoreRef().equals(valueNodeRef.getStoreRef())) {
        Path nodeRefPath = createPath(context.getExportOf(), nodeRef, valueNodeRef);
        value = (nodeRefPath == null) ? null : nodeRefPath.toPrefixString(namespaceService);
      }
    }

    // output value wrapper if value is null, the property data type is ANY,
    // or the value is part of a collection
    if (value == null || valueDataType != null || index != -1) {
      AttributesImpl attrs = new AttributesImpl();
      if (value == null) {
        attrs.addAttribute(
            NamespaceService.REPOSITORY_VIEW_PREFIX,
            ISNULL_LOCALNAME,
            ISNULL_QNAME.toPrefixString(),
            null,
            "true");
      }
      if (valueDataType != null) {
        attrs.addAttribute(
            NamespaceService.REPOSITORY_VIEW_PREFIX,
            DATATYPE_LOCALNAME,
            DATATYPE_QNAME.toPrefixString(),
            null,
            toPrefixString(valueDataType));
      }
      contentHandler.startElement(
          NamespaceService.REPOSITORY_VIEW_PREFIX,
          VALUE_LOCALNAME,
          toPrefixString(VALUE_QNAME),
          attrs);
    }

    // output value
    String strValue = (String) DefaultTypeConverter.INSTANCE.convert(String.class, value);
    if (strValue != null) {
      for (int i = 0; i < strValue.length(); i++) {
        char[] temp = new char[] {strValue.charAt(i)};
        contentHandler.characters(temp, 0, 1);
      }
    }

    // close the value wrapper if one was opened
    if (value == null || valueDataType != null || index != -1) {
      contentHandler.endElement(
          NamespaceService.REPOSITORY_VIEW_PREFIX, VALUE_LOCALNAME, toPrefixString(VALUE_QNAME));
    }
  } catch (SAXException e) {
    throw new ExporterException(
        "Failed to process value event - nodeRef "
            + nodeRef
            + "; property "
            + toPrefixString(property)
            + "; value "
            + value,
        e);
  }
}
/* (non-Javadoc)
 * @see org.alfresco.service.cmr.view.Exporter#start()
 */
public void start(ExporterContext context) {
  try {
    this.context = context;
    contentHandler.startDocument();
    contentHandler.startPrefixMapping(
        NamespaceService.REPOSITORY_VIEW_PREFIX, NamespaceService.REPOSITORY_VIEW_1_0_URI);
    contentHandler.startElement(
        NamespaceService.REPOSITORY_VIEW_PREFIX,
        VIEW_LOCALNAME,
        VIEW_QNAME.toPrefixString(),
        EMPTY_ATTRIBUTES);

    //
    // output metadata
    //
    contentHandler.startElement(
        NamespaceService.REPOSITORY_VIEW_PREFIX,
        METADATA_LOCALNAME,
        METADATA_QNAME.toPrefixString(),
        EMPTY_ATTRIBUTES);

    // exported by
    contentHandler.startElement(
        NamespaceService.REPOSITORY_VIEW_PREFIX,
        EXPORTEDBY_LOCALNAME,
        EXPORTEDBY_QNAME.toPrefixString(),
        EMPTY_ATTRIBUTES);
    contentHandler.characters(
        context.getExportedBy().toCharArray(), 0, context.getExportedBy().length());
    contentHandler.endElement(
        NamespaceService.REPOSITORY_VIEW_PREFIX,
        EXPORTEDBY_LOCALNAME,
        EXPORTEDBY_QNAME.toPrefixString());

    // exported date
    contentHandler.startElement(
        NamespaceService.REPOSITORY_VIEW_PREFIX,
        EXPORTEDDATE_LOCALNAME,
        EXPORTEDDATE_QNAME.toPrefixString(),
        EMPTY_ATTRIBUTES);
    String date = DefaultTypeConverter.INSTANCE.convert(String.class, context.getExportedDate());
    contentHandler.characters(date.toCharArray(), 0, date.length());
    contentHandler.endElement(
        NamespaceService.REPOSITORY_VIEW_PREFIX,
        EXPORTEDDATE_LOCALNAME,
        EXPORTEDDATE_QNAME.toPrefixString());

    // exporter version
    contentHandler.startElement(
        NamespaceService.REPOSITORY_VIEW_PREFIX,
        EXPORTERVERSION_LOCALNAME,
        EXPORTERVERSION_QNAME.toPrefixString(),
        EMPTY_ATTRIBUTES);
    contentHandler.characters(
        context.getExporterVersion().toCharArray(), 0, context.getExporterVersion().length());
    contentHandler.endElement(
        NamespaceService.REPOSITORY_VIEW_PREFIX,
        EXPORTERVERSION_LOCALNAME,
        EXPORTERVERSION_QNAME.toPrefixString());

    // export of
    contentHandler.startElement(
        NamespaceService.REPOSITORY_VIEW_PREFIX,
        EXPORTOF_LOCALNAME,
        EXPORTOF_QNAME.toPrefixString(),
        EMPTY_ATTRIBUTES);
    String path = nodeService.getPath(context.getExportOf()).toPrefixString(namespaceService);
    contentHandler.characters(path.toCharArray(), 0, path.length());
    contentHandler.endElement(
        NamespaceService.REPOSITORY_VIEW_PREFIX,
        EXPORTOF_LOCALNAME,
        EXPORTOF_QNAME.toPrefixString());

    contentHandler.endElement(
        NamespaceService.REPOSITORY_VIEW_PREFIX,
        METADATA_LOCALNAME,
        METADATA_QNAME.toPrefixString());
  } catch (SAXException e) {
    throw new ExporterException("Failed to process export start event", e);
  }
}
public ResultSet executeQuery(final SearchParameters searchParameters, String language) {
  if (repositoryState.isBootstrapping()) {
    throw new AlfrescoRuntimeException(
        "SOLR queries can not be executed while the repository is bootstrapping");
  }

  try {
    StoreRef store = extractStoreRef(searchParameters);
    SolrStoreMappingWrapper mapping = extractMapping(store);
    Locale locale = extractLocale(searchParameters);

    URLCodec encoder = new URLCodec();
    StringBuilder url = new StringBuilder();

    Pair<HttpClient, String> httpClientAndBaseUrl = mapping.getHttpClientAndBaseUrl();
    HttpClient httpClient = httpClientAndBaseUrl.getFirst();

    url.append(httpClientAndBaseUrl.getSecond());
    String languageUrlFragment = extractLanguageFragment(language);
    url.append("/").append(languageUrlFragment);

    // Send the query in JSON only
    // url.append("?q=");
    // url.append(encoder.encode(searchParameters.getQuery(), "UTF-8"));
    url.append("?wt=").append(encoder.encode("json", "UTF-8"));
    url.append("&fl=").append(encoder.encode("DBID,score", "UTF-8"));

    if ((searchParameters.getStores().size() > 1) || (mapping.isSharded())) {
      boolean requiresSeparator = false;
      url.append("&shards=");
      for (StoreRef storeRef : searchParameters.getStores()) {
        SolrStoreMappingWrapper storeMapping = extractMapping(storeRef);
        if (requiresSeparator) {
          url.append(',');
        } else {
          requiresSeparator = true;
        }
        url.append(storeMapping.getShards());
      }
    }

    // Emulate old limiting behaviour and metadata
    final LimitBy limitBy;
    int maxResults = -1;
    if (searchParameters.getMaxItems() >= 0) {
      maxResults = searchParameters.getMaxItems();
      limitBy = LimitBy.FINAL_SIZE;
    } else if (searchParameters.getLimitBy() == LimitBy.FINAL_SIZE
        && searchParameters.getLimit() >= 0) {
      maxResults = searchParameters.getLimit();
      limitBy = LimitBy.FINAL_SIZE;
    } else {
      maxResults = searchParameters.getMaxPermissionChecks();
      if (maxResults < 0) {
        maxResults = maximumResultsFromUnlimitedQuery;
      }
      limitBy = LimitBy.NUMBER_OF_PERMISSION_EVALUATIONS;
    }
    url.append("&rows=").append(String.valueOf(maxResults));

    url.append("&df=").append(encoder.encode(searchParameters.getDefaultFieldName(), "UTF-8"));
    url.append("&start=").append(encoder.encode("" + searchParameters.getSkipCount(), "UTF-8"));

    url.append("&locale=");
    url.append(encoder.encode(locale.toString(), "UTF-8"));
    url.append("&")
        .append(SearchParameters.ALTERNATIVE_DICTIONARY)
        .append("=")
        .append(alternativeDictionary);
    for (String paramName : searchParameters.getExtraParameters().keySet()) {
      url.append("&")
          .append(paramName)
          .append("=")
          .append(searchParameters.getExtraParameters().get(paramName));
    }
    StringBuffer sortBuffer = buildSortParameters(searchParameters, encoder);
    url.append(sortBuffer);

    if (searchParameters.getPermissionEvaluation() != PermissionEvaluationMode.NONE) {
      url.append("&fq=").append(encoder.encode("{!afts}AUTHORITY_FILTER_FROM_JSON", "UTF-8"));
    }

    if (searchParameters.getExcludeTenantFilter() == false) {
      url.append("&fq=").append(encoder.encode("{!afts}TENANT_FILTER_FROM_JSON", "UTF-8"));
    }

    if (searchParameters.getFieldFacets().size() > 0) {
      url.append("&facet=").append(encoder.encode("true", "UTF-8"));
      for (FieldFacet facet : searchParameters.getFieldFacets()) {
        url.append("&facet.field=").append(encoder.encode(facet.getField(), "UTF-8"));
        if (facet.getEnumMethodCacheMinDF() != 0) {
          url.append("&")
              .append(
                  encoder.encode("f." + facet.getField() + ".facet.enum.cache.minDf", "UTF-8"))
              .append("=")
              .append(encoder.encode("" + facet.getEnumMethodCacheMinDF(), "UTF-8"));
        }
        url.append("&")
            .append(encoder.encode("f." + facet.getField() + ".facet.limit", "UTF-8"))
            .append("=")
            .append(encoder.encode("" + facet.getLimit(), "UTF-8"));
        if (facet.getMethod() != null) {
          url.append("&")
              .append(encoder.encode("f." + facet.getField() + ".facet.method", "UTF-8"))
              .append("=")
              .append(
                  encoder.encode(
                      facet.getMethod() == FieldFacetMethod.ENUM ? "enum" : "fc", "UTF-8"));
        }
        if (facet.getMinCount() != 0) {
          url.append("&")
              .append(encoder.encode("f." + facet.getField() + ".facet.mincount", "UTF-8"))
              .append("=")
              .append(encoder.encode("" + facet.getMinCount(), "UTF-8"));
        }
        if (facet.getOffset() != 0) {
          url.append("&")
              .append(encoder.encode("f." + facet.getField() + ".facet.offset", "UTF-8"))
              .append("=")
              .append(encoder.encode("" + facet.getOffset(), "UTF-8"));
        }
        if (facet.getPrefix() != null) {
          url.append("&")
              .append(encoder.encode("f." + facet.getField() + ".facet.prefix", "UTF-8"))
              .append("=")
              .append(encoder.encode("" + facet.getPrefix(), "UTF-8"));
        }
        if (facet.getSort() != null) {
          url.append("&")
              .append(encoder.encode("f." + facet.getField() + ".facet.sort", "UTF-8"))
              .append("=")
              .append(
                  encoder.encode(
                      facet.getSort() == FieldFacetSort.COUNT ? "count" : "index", "UTF-8"));
        }
      }

      for (String facetQuery : searchParameters.getFacetQueries()) {
        url.append("&facet.query=").append(encoder.encode("{!afts}" + facetQuery, "UTF-8"));
      }
    } // end of field facets

    final String searchTerm = searchParameters.getSearchTerm();
    String spellCheckQueryStr = null;
    if (searchTerm != null && searchParameters.isSpellCheck()) {
      StringBuilder builder = new StringBuilder();
      builder.append("&spellcheck.q=").append(encoder.encode(searchTerm, "UTF-8"));
      builder.append("&spellcheck=").append(encoder.encode("true", "UTF-8"));
      spellCheckQueryStr = builder.toString();
      url.append(spellCheckQueryStr);
    }

    JSONObject body = new JSONObject();
    body.put("query", searchParameters.getQuery());

    // Authorities go over as is - tenant mangling and query building take place on the SOLR side
    Set<String> allAuthorisations = permissionService.getAuthorisations();
    boolean includeGroups =
        includeGroupsForRoleAdmin
            ? true
            : !allAuthorisations.contains(PermissionService.ADMINISTRATOR_AUTHORITY);

    JSONArray authorities = new JSONArray();
    for (String authority : allAuthorisations) {
      if (includeGroups) {
        authorities.put(authority);
      } else {
        if (AuthorityType.getAuthorityType(authority) != AuthorityType.GROUP) {
          authorities.put(authority);
        }
      }
    }
    body.put("authorities", authorities);
    body.put("anyDenyDenies", anyDenyDenies);

    JSONArray tenants = new JSONArray();
    tenants.put(tenantService.getCurrentUserDomain());
    body.put("tenants", tenants);

    JSONArray locales = new JSONArray();
    for (Locale currentLocale : searchParameters.getLocales()) {
      locales.put(DefaultTypeConverter.INSTANCE.convert(String.class, currentLocale));
    }
    if (locales.length() == 0) {
      locales.put(I18NUtil.getLocale());
    }
    body.put("locales", locales);

    JSONArray templates = new JSONArray();
    for (String templateName : searchParameters.getQueryTemplates().keySet()) {
      JSONObject template = new JSONObject();
      template.put("name", templateName);
      template.put("template", searchParameters.getQueryTemplates().get(templateName));
      templates.put(template);
    }
    body.put("templates", templates);

    JSONArray allAttributes = new JSONArray();
    for (String attribute : searchParameters.getAllAttributes()) {
      allAttributes.put(attribute);
    }
    body.put("allAttributes", allAttributes);

    body.put("defaultFTSOperator", searchParameters.getDefaultFTSOperator());
    body.put("defaultFTSFieldOperator", searchParameters.getDefaultFTSFieldOperator());
    body.put("queryConsistency", searchParameters.getQueryConsistency());
    if (searchParameters.getMlAnalaysisMode() != null) {
      body.put("mlAnalaysisMode", searchParameters.getMlAnalaysisMode().toString());
    }
    body.put("defaultNamespace", searchParameters.getNamespace());

    JSONArray textAttributes = new JSONArray();
    for (String attribute : searchParameters.getTextAttributes()) {
      textAttributes.put(attribute);
    }
    body.put("textAttributes", textAttributes);

    final int maximumResults = maxResults; // just needed for the final parameter

    return (ResultSet)
        postSolrQuery(
            httpClient,
            url.toString(),
            body,
            new SolrJsonProcessor<SolrJSONResultSet>() {
              @Override
              public SolrJSONResultSet getResult(JSONObject json) {
                return new SolrJSONResultSet(
                    json, searchParameters, nodeService, nodeDAO, limitBy, maximumResults);
              }
            },
            spellCheckQueryStr);
  } catch (UnsupportedEncodingException e) {
    throw new LuceneQueryParserException("", e);
  } catch (HttpException e) {
    throw new LuceneQueryParserException("", e);
  } catch (IOException e) {
    throw new LuceneQueryParserException("", e);
  } catch (JSONException e) {
    throw new LuceneQueryParserException("", e);
  }
}
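/*
 * For orientation, a minimal version of the JSON request body assembled in executeQuery() above.
 * Field values are illustrative; in the real method the query, authorities, tenant and locale
 * values come from the SearchParameters and the injected services.
 */
import org.json.JSONArray;
import org.json.JSONObject;

public class SolrQueryBodySketch {
  public static void main(String[] args) throws Exception {
    JSONObject body = new JSONObject();
    body.put("query", "TYPE:\"cm:content\" AND TEXT:sample"); // illustrative query string
    body.put("authorities", new JSONArray().put("GROUP_EVERYONE").put("jbloggs"));
    body.put("anyDenyDenies", true);
    body.put("tenants", new JSONArray().put("")); // getCurrentUserDomain() supplies the real value
    body.put("locales", new JSONArray().put("en_GB"));
    body.put("templates", new JSONArray());
    body.put("allAttributes", new JSONArray());
    body.put("textAttributes", new JSONArray());
    System.out.println(body.toString(2));
  }
}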
private CategoryPaths getCategoryPaths(
    NodeRef nodeRef, Set<QName> aspects, Map<QName, Serializable> properties) {
  ArrayList<Pair<Path, QName>> categoryPaths = new ArrayList<Pair<Path, QName>>();
  ArrayList<ChildAssociationRef> categoryParents = new ArrayList<ChildAssociationRef>();

  nodeDAO.setCheckNodeConsistency();
  for (QName classRef : aspects) {
    AspectDefinition aspDef = dictionaryService.getAspect(classRef);
    if (!isCategorised(aspDef)) {
      continue;
    }
    LinkedList<Pair<Path, QName>> aspectPaths = new LinkedList<Pair<Path, QName>>();
    for (PropertyDefinition propDef : aspDef.getProperties().values()) {
      if (!propDef.getDataType().getName().equals(DataTypeDefinition.CATEGORY)) {
        // The property is not a category
        continue;
      }
      // Don't try to iterate if the property is null
      Serializable propVal = properties.get(propDef.getName());
      if (propVal == null) {
        continue;
      }
      for (NodeRef catRef : DefaultTypeConverter.INSTANCE.getCollection(NodeRef.class, propVal)) {
        if (catRef == null) {
          continue;
        }
        // can be running in context of System user, hence use input nodeRef
        catRef = tenantService.getName(nodeRef, catRef);
        try {
          Pair<Long, NodeRef> pair = nodeDAO.getNodePair(catRef);
          if (pair != null) {
            for (Path path : nodeDAO.getPaths(pair, false)) {
              aspectPaths.add(new Pair<Path, QName>(path, aspDef.getName()));
            }
          }
        } catch (InvalidNodeRefException e) {
          // If the category does not exist we move on to the next
        }
      }
    }
    categoryPaths.addAll(aspectPaths);
  }

  // Add member final element
  for (Pair<Path, QName> pair : categoryPaths) {
    if (pair.getFirst().last() instanceof Path.ChildAssocElement) {
      Path.ChildAssocElement cae = (Path.ChildAssocElement) pair.getFirst().last();
      ChildAssociationRef assocRef = cae.getRef();
      ChildAssociationRef categoryParentRef =
          new ChildAssociationRef(
              assocRef.getTypeQName(),
              assocRef.getChildRef(),
              QName.createQName("member"),
              nodeRef);
      pair.getFirst().append(new Path.ChildAssocElement(categoryParentRef));
      categoryParents.add(categoryParentRef);
    }
  }

  return new CategoryPaths(categoryPaths, categoryParents);
}
/*
 * Parameterise the query string - not sure if it is required to escape Lucene special chars. The
 * parameters could be used to build the query - the contents of parameters should already have
 * been escaped if required. ... much better to provide the parameters and work out what to do
 * TODO: conditional query escapement - maybe we should have a parameter type that is not escaped
 */
private String parameterise(
    String unparameterised,
    Map<QName, QueryParameterDefinition> map,
    QueryParameter[] queryParameters,
    NamespacePrefixResolver nspr)
    throws QueryParameterisationException {

  Map<QName, List<Serializable>> valueMap = new HashMap<QName, List<Serializable>>();

  if (queryParameters != null) {
    for (QueryParameter parameter : queryParameters) {
      List<Serializable> list = valueMap.get(parameter.getQName());
      if (list == null) {
        list = new ArrayList<Serializable>();
        valueMap.put(parameter.getQName(), list);
      }
      list.add(parameter.getValue());
    }
  }

  Map<QName, ListIterator<Serializable>> iteratorMap =
      new HashMap<QName, ListIterator<Serializable>>();

  List<QName> missing = new ArrayList<QName>(1);
  StringBuilder buffer = new StringBuilder(unparameterised);
  int index = 0;
  while ((index = buffer.indexOf("${", index)) != -1) {
    int endIndex = buffer.indexOf("}", index);
    String qNameString = buffer.substring(index + 2, endIndex);
    QName key = QName.createQName(qNameString, nspr);
    QueryParameterDefinition parameterDefinition = map.get(key);
    if (parameterDefinition == null) {
      missing.add(key);
      buffer.replace(index, endIndex + 1, "");
    } else {
      ListIterator<Serializable> it = iteratorMap.get(key);
      if ((it == null) || (!it.hasNext())) {
        List<Serializable> list = valueMap.get(key);
        if ((list != null) && (list.size() > 0)) {
          it = list.listIterator();
        }
        if (it != null) {
          iteratorMap.put(key, it);
        }
      }
      String value;
      if (it == null) {
        value = parameterDefinition.getDefault();
      } else {
        value = DefaultTypeConverter.INSTANCE.convert(String.class, it.next());
      }
      buffer.replace(index, endIndex + 1, value);
    }
  }

  if (missing.size() > 0) {
    StringBuilder error = new StringBuilder();
    error.append("The query uses the following parameters which are not defined: ");
    for (QName qName : missing) {
      error.append(qName);
      error.append(", ");
    }
    // strip the trailing ", " separator
    error.deleteCharAt(error.length() - 1);
    error.deleteCharAt(error.length() - 1);
    throw new QueryParameterisationException(error.toString());
  }

  return buffer.toString();
}
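/*
 * A simplified, self-contained version of the ${...} substitution loop in parameterise(), to show
 * the mechanics in isolation. The real method resolves the token to a QName, tracks missing
 * parameters, supports per-parameter defaults and cycles through multiple supplied values; this
 * sketch just replaces each token from a plain map (the query string and values are illustrative).
 */
import java.util.HashMap;
import java.util.Map;

public class ParameteriseSketch {
  static String substitute(String template, Map<String, String> values) {
    StringBuilder buffer = new StringBuilder(template);
    int index = 0;
    while ((index = buffer.indexOf("${", index)) != -1) {
      int endIndex = buffer.indexOf("}", index);
      String name = buffer.substring(index + 2, endIndex);
      // Unknown parameters are simply removed, mirroring the branch that records them as missing.
      buffer.replace(index, endIndex + 1, values.getOrDefault(name, ""));
    }
    return buffer.toString();
  }

  public static void main(String[] args) {
    Map<String, String> values = new HashMap<String, String>();
    values.put("cm:name", "quick.doc");
    // Prints: +@cm\:name:"quick.doc"
    System.out.println(substitute("+@cm\\:name:\"${cm:name}\"", values));
  }
}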
/*
 * (non-Javadoc)
 * @see org.xml.sax.ContentHandler#startElement(java.lang.String, java.lang.String, java.lang.String, org.xml.sax.Attributes)
 */
public void startElement(String uri, String localName, String qName, Attributes atts)
    throws SAXException {
  try {
    // construct qname for element
    QName elementName = decodeQName(QName.createQName(qName, importResolver));

    // setup parent context
    ParentContext parentContext = null;
    if (contextStack.empty()) {
      // create root parent context
      parentContext = new ParentContext(elementName, dictionaryService, importer);
    } else {
      // create parent context
      NodeContext parentNode = (NodeContext) contextStack.peek();
      parentContext = new ParentContext(elementName, parentNode);
    }

    // create node context
    NodeContext node = new NodeContext(elementName, parentContext, null);
    node.setChildName(elementName.toPrefixString(importResolver));
    contextStack.push(node);

    // process node properties
    for (int i = 0; i < atts.getLength(); i++) {
      QName propName = decodeQName(QName.createQName(atts.getURI(i), atts.getLocalName(i)));
      String value = atts.getValue(i);

      //
      // process "well-known" properties
      //
      if (propName.equals(JCRPrimaryTypeProperty.PROPERTY_NAME)) {
        // primary type
        QName primaryTypeQName = QName.createQName(value, importResolver);
        TypeDefinition typeDef = dictionaryService.getType(primaryTypeQName);
        if (typeDef == null) {
          throw new InvalidTypeException(primaryTypeQName);
        }
        node.setTypeDefinition(typeDef);
      } else if (propName.equals(JCRMixinTypesProperty.PROPERTY_NAME)) {
        // aspects
        String[] aspects = value.split(" ");
        for (String aspect : aspects) {
          // ignore JCR specific aspects
          QName aspectQName = QName.createQName(aspect, importResolver);
          if (!(JCRNamespace.JCR_URI.equals(aspectQName.getNamespaceURI())
              || JCRNamespace.MIX_URI.equals(aspectQName.getNamespaceURI()))) {
            AspectDefinition aspectDef = dictionaryService.getAspect(aspectQName);
            if (aspectDef == null) {
              throw new InvalidTypeException(aspectQName);
            }
            node.addAspect(aspectDef);
          }
        }
      } else if (JCRUUIDProperty.PROPERTY_NAME.equals(propName)) {
        node.setUUID(value);
      }
      //
      // Note: ignore JCR specific properties
      //
      else if (JCRNamespace.JCR_URI.equals(propName.getNamespaceURI())) {
      }
      //
      // process all other properties
      //
      else {
        // determine type of property
        PropertyDefinition propDef = dictionaryService.getProperty(propName);
        if (propDef == null) {
          throw new ImporterException(
              "Property " + propName + " is not known to the repository data dictionary");
        }
        DataTypeDefinition dataTypeDef = propDef.getDataType();

        // extract values from node xml attribute
        String[] propValues = null;
        PropertyContext propertyContext =
            new PropertyContext(elementName, node, propName, dataTypeDef.getName());
        if (dataTypeDef.getName().equals(DataTypeDefinition.CONTENT)) {
          // Note: we only support single valued content properties
          propValues = new String[] {value};
        } else {
          // attempt to split multi-value properties
          propValues = value.split(" ");
        }

        // extract values appropriately
        for (String propValue : propValues) {
          propertyContext.startValue();
          propertyContext.appendCharacters(propValue.toCharArray(), 0, propValue.length());
          propertyContext.endValue();
        }

        // add each value to the node
        if (propertyContext.isMultiValue()) {
          node.addPropertyCollection(propName);
        }
        List<StringBuffer> nodeValues = propertyContext.getValues();
        for (StringBuffer nodeValue : nodeValues) {
          // first, cast value to appropriate type (using JCR converters)
          Serializable objVal =
              (Serializable) session.getTypeConverter().convert(dataTypeDef, nodeValue.toString());
          String strValue = DefaultTypeConverter.INSTANCE.convert(String.class, objVal);
          node.addProperty(propName, strValue);
        }
      }
    }

    // import node
    NodeRef nodeRef = node.getImporter().importNode(node);
    node.setNodeRef(nodeRef);
  } catch (Exception e) {
    throw new SAXException("Failed to process element " + qName, e);
  }
}
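/*
 * As the comments in startElement() note, non-content attribute values are split on single spaces
 * to detect multi-valued properties, while content properties are always treated as one value. A
 * small illustration of that convention and its main caveat (values here are illustrative).
 */
public class MultiValueSplitSketch {
  public static void main(String[] args) {
    // A multi-valued attribute arrives as one space-separated string.
    String attributeValue = "red green blue";
    System.out.println(attributeValue.split(" ").length); // 3 -> treated as a property collection

    // Caveat of the single-space convention: a single value containing a space is
    // indistinguishable from two separate values.
    String tricky = "New York";
    System.out.println(tricky.split(" ").length); // 2
  }
}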