/**
 * Computes the weighted Shannon entropy (log base 2) over all attribute values.
 *
 * <p>For each attribute value, the entropy of its per-class count distribution is
 * computed and stored on the value via {@code setEntropy}; the method returns the
 * sum of those entropies weighted by each value's share of the total record count
 * ({@code size()}).
 *
 * @param attributeValues the attribute values keyed by attribute index
 * @return the weighted entropy across all attribute values
 * @throws MLException if the underlying data access fails
 */
public BigDecimal calculateEntropy(Map<Integer, AttributeValue> attributeValues) throws MLException {
  BigDecimal weightedEntropy = new BigDecimal(0);
  for (AttributeValue value : attributeValues.values()) {
    int valueCount = value.getAttributeValueCount();
    BigDecimal entropy = new BigDecimal(0);
    for (Integer classCount : value.getClassifiedCountMap().values()) {
      // p = classCount / valueCount, then log2(p) via the natural-log change of base.
      BigDecimal p =
          BigDecimal.valueOf(classCount).divide(new BigDecimal(valueCount), globalMathContext);
      BigDecimal log2p =
          BigDecimal.valueOf(Math.log(p.doubleValue()))
              .divide(new BigDecimal(Math.log(2)), globalMathContext);
      entropy = entropy.add(p.multiply(log2p, globalMathContext));
    }
    // Entropy is -sum(p * log2 p); flip the sign of the accumulated sum.
    entropy = entropy.negate();
    value.setEntropy(entropy);
    // Weight this value's entropy by its share of the whole data set.
    BigDecimal weight =
        BigDecimal.valueOf(value.getAttributeValueCount())
            .divide(new BigDecimal(size()), globalMathContext);
    weightedEntropy = weightedEntropy.add(weight.multiply(entropy));
  }
  return weightedEntropy;
}
/**
 * Computes either the weighted entropy (delegating to {@link #calculateEntropy(Map)})
 * or, when {@code withVariance} is set, a weighted product of the per-class fractions
 * for each attribute value.
 *
 * @param attributeValues the attribute values keyed by attribute index
 * @param withVariance when false, compute plain entropy; when true, compute the
 *     variance-style product measure
 * @return the weighted result across all attribute values
 * @throws MLException if the underlying data access fails
 */
public BigDecimal calculateEntropy(
    Map<Integer, AttributeValue> attributeValues, boolean withVariance) throws MLException {
  if (!withVariance) {
    return calculateEntropy(attributeValues);
  } else {
    BigDecimal attributeEntropyresult = new BigDecimal(0);
    for (AttributeValue attributeValue : attributeValues.values()) {
      int attributeValueCount = attributeValue.getAttributeValueCount();
      BigDecimal innerVarianceResult = new BigDecimal(1);
      for (Integer corrospondingClassifiedCount :
          attributeValue.getClassifiedCountMap().values()) {
        BigDecimal fraction =
            BigDecimal.valueOf(corrospondingClassifiedCount)
                .divide(new BigDecimal(attributeValueCount), globalMathContext);
        // BUG FIX: BigDecimal is immutable, so the result of multiply() must be
        // reassigned; the original discarded it and always returned weights * 1.
        innerVarianceResult = innerVarianceResult.multiply(fraction, globalMathContext);
      }
      attributeValue.setEntropy(innerVarianceResult);
      // Weight this value's result by its share of the whole data set.
      BigDecimal attributeGlobalFraction =
          BigDecimal.valueOf(attributeValueCount)
              .divide(new BigDecimal(size()), globalMathContext);
      attributeEntropyresult =
          attributeEntropyresult.add(attributeGlobalFraction.multiply(innerVarianceResult));
    }
    return attributeEntropyresult;
  }
}
/**
 * Evaluates an entry about to be added for "aci" attribute type values. When the entry
 * carries any "aci" values, the requester must hold the "modify-acl" privilege, and
 * every "aci" value must pass ACI syntax checking for the add to proceed.
 *
 * @param entry The entry to be examined.
 * @param operation The operation to check privileges on.
 * @param clientDN The authorization DN.
 * @return True if the entry has no ACI attributes, or if all of its "aci" values pass
 *     syntax checking.
 * @throws DirectoryException If an ACI value could not be decoded.
 */
private boolean verifySyntax(Entry entry, Operation operation, DN clientDN)
    throws DirectoryException {
  // Entries without an "aci" operational attribute need no further checks.
  if (!entry.hasOperationalAttribute(aciType)) {
    return true;
  }
  // The entry carries "aci" values, so the requester needs "modify-acl".
  if (!operation.getClientConnection().hasPrivilege(Privilege.MODIFY_ACL, operation)) {
    Message failureMsg =
        INFO_ACI_ADD_FAILED_PRIVILEGE.get(
            String.valueOf(entry.getDN()), String.valueOf(clientDN));
    logError(failureMsg);
    return false;
  }
  // Every "aci" value must decode cleanly; the first failure aborts the add.
  for (Attribute aciAttr : entry.getOperationalAttribute(aciType, null)) {
    for (AttributeValue aciValue : aciAttr) {
      try {
        Aci.decode(aciValue.getValue(), entry.getDN());
      } catch (AciException ex) {
        Message failureMsg =
            WARN_ACI_ADD_FAILED_DECODE.get(String.valueOf(entry.getDN()), ex.getMessage());
        throw new DirectoryException(ResultCode.INVALID_ATTRIBUTE_SYNTAX, failureMsg);
      }
    }
  }
  return true;
}
/** Verifies that createValue round-trips an arbitrary string unchanged. */
@Test
public void createValueValid() {
  final String expected = UUID.randomUUID().toString();
  final AttributeValue created = attrib.createValue(expected);
  assertThat(created.getValue(), is(equalTo(expected)));
}
/**
 * Emits bytecode that builds a processing instruction: the PI name (checked at
 * runtime when it is an AVT) and the PI contents (captured through the translet's
 * string value handler) are passed to processingInstruction(name, value) on the
 * saved output handler.
 *
 * NOTE(review): the stack discipline here (handler saved, DUP'd as the receiver,
 * restored at the end) must match the other XSLTC SyntaxTreeNode.translate
 * implementations — confirm against them before reordering any append calls.
 */
public void translate(ClassGenerator classGen, MethodGenerator methodGen) {
  final ConstantPoolGen cpg = classGen.getConstantPool();
  final InstructionList il = methodGen.getInstructionList();
  if (!_isLiteral) {
    // If the ncname is an AVT, then the ncname has to be checked at runtime
    // if it is a valid ncname.
    LocalVariableGen nameValue =
        methodGen.addLocalVariable2("nameValue", Util.getJCRefType(STRING_SIG), null);
    // Store the name into a variable first so _name.translate only needs to be
    // called once.
    _name.translate(classGen, methodGen);
    nameValue.setStart(il.append(new ASTORE(nameValue.getIndex())));
    il.append(new ALOAD(nameValue.getIndex()));
    // Call checkNCName since the name is an AVT; it throws at runtime when the
    // evaluated name is not a valid NCName.
    final int check =
        cpg.addMethodref(BASIS_LIBRARY_CLASS, "checkNCName", "(" + STRING_SIG + ")V");
    il.append(new INVOKESTATIC(check));
    // Save the current handler base on the stack.
    il.append(methodGen.loadHandler());
    il.append(DUP); // first arg to "attributes" call
    // Load the name value again as the second argument.
    nameValue.setEnd(il.append(new ALOAD(nameValue.getIndex())));
  } else {
    // Save the current handler base on the stack.
    il.append(methodGen.loadHandler());
    il.append(DUP); // first arg to "attributes" call
    // Push the (literal, already validated) attribute name as the 2nd arg.
    _name.translate(classGen, methodGen);
  }
  // Swap the output handler for the translet's stringValueHandler so the PI
  // contents are captured as a string instead of being emitted directly.
  il.append(classGen.loadTranslet());
  il.append(
      new GETFIELD(
          cpg.addFieldref(TRANSLET_CLASS, "stringValueHandler", STRING_VALUE_HANDLER_SIG)));
  il.append(DUP);
  il.append(methodGen.storeHandler());
  // Translate contents with the substituted handler.
  translateContents(classGen, methodGen);
  // Get the captured String out of the handler.
  il.append(
      new INVOKEVIRTUAL(
          cpg.addMethodref(STRING_VALUE_HANDLER, "getValueOfPI", "()" + STRING_SIG)));
  // Call "processingInstruction" on the saved handler.
  final int processingInstruction =
      cpg.addInterfaceMethodref(
          TRANSLET_OUTPUT_INTERFACE,
          "processingInstruction",
          "(" + STRING_SIG + STRING_SIG + ")V");
  il.append(new INVOKEINTERFACE(processingInstruction, 3));
  // Restore the old handler base from the stack.
  il.append(methodGen.storeHandler());
}
/**
 * Returns the first value in this attribute's value list whose value code equals the
 * given code.
 *
 * @param valueCode the value code to look up
 * @return the matching AttributeValue, or null when no value matches
 */
public AttributeValue getAttributeValue(String valueCode) {
  // Enhanced for loop instead of a raw Iterator; the list is untyped, so each
  // element still needs an explicit cast.
  for (Object element : getValueList()) {
    AttributeValue attributeValue = (AttributeValue) element;
    if (attributeValue.getValueCode().equals(valueCode)) {
      return attributeValue;
    }
  }
  return null;
}
/**
 * Generates an entry for a backup directory based on the provided DN. The DN must contain an RDN
 * component that specifies the path to the backup directory, and that directory must exist and be
 * a valid backup directory.
 *
 * @param entryDN The DN of the backup directory entry to retrieve.
 * @return The requested backup directory entry.
 * @throws DirectoryException If the specified directory does not exist or is not a valid backup
 *     directory, or if the DN does not specify any backup directory.
 */
private Entry getBackupDirectoryEntry(DN entryDN) throws DirectoryException {
  // Make sure that the DN specifies a backup directory path in its RDN.
  AttributeType t = DirectoryServer.getAttributeType(ATTR_BACKUP_DIRECTORY_PATH, true);
  AttributeValue v = entryDN.getRDN().getAttributeValue(t);
  if (v == null) {
    Message message = ERR_BACKUP_DN_DOES_NOT_SPECIFY_DIRECTORY.get(String.valueOf(entryDN));
    throw new DirectoryException(ResultCode.CONSTRAINT_VIOLATION, message, backupBaseDN, null);
  }
  // Get a handle to the backup directory and the information that it contains.
  BackupDirectory backupDirectory;
  try {
    backupDirectory = BackupDirectory.readBackupDirectoryDescriptor(v.getValue().toString());
  } catch (ConfigException ce) {
    // The descriptor exists but is not a valid backup directory configuration.
    if (debugEnabled()) {
      TRACER.debugCaught(DebugLogLevel.ERROR, ce);
    }
    Message message =
        ERR_BACKUP_INVALID_BACKUP_DIRECTORY.get(String.valueOf(entryDN), ce.getMessage());
    throw new DirectoryException(ResultCode.CONSTRAINT_VIOLATION, message);
  } catch (Exception e) {
    // Any other failure (I/O, parsing, ...) is treated as a server error.
    if (debugEnabled()) {
      TRACER.debugCaught(DebugLogLevel.ERROR, e);
    }
    Message message = ERR_BACKUP_ERROR_GETTING_BACKUP_DIRECTORY.get(getExceptionMessage(e));
    throw new DirectoryException(DirectoryServer.getServerErrorResultCode(), message);
  }
  // Construct the backup directory entry to return: object classes top and
  // backup-directory, plus the directory path and owning backend DN attributes.
  LinkedHashMap<ObjectClass, String> ocMap = new LinkedHashMap<ObjectClass, String>(2);
  ocMap.put(DirectoryServer.getTopObjectClass(), OC_TOP);
  ObjectClass backupDirOC = DirectoryServer.getObjectClass(OC_BACKUP_DIRECTORY, true);
  ocMap.put(backupDirOC, OC_BACKUP_DIRECTORY);
  LinkedHashMap<AttributeType, List<Attribute>> opAttrs =
      new LinkedHashMap<AttributeType, List<Attribute>>(0);
  LinkedHashMap<AttributeType, List<Attribute>> userAttrs =
      new LinkedHashMap<AttributeType, List<Attribute>>(3);
  ArrayList<Attribute> attrList = new ArrayList<Attribute>(1);
  attrList.add(Attributes.create(t, v));
  userAttrs.put(t, attrList);
  // The backend DN attribute comes from the descriptor's config entry.
  t = DirectoryServer.getAttributeType(ATTR_BACKUP_BACKEND_DN, true);
  attrList = new ArrayList<Attribute>(1);
  attrList.add(
      Attributes.create(
          t, AttributeValues.create(t, backupDirectory.getConfigEntryDN().toString())));
  userAttrs.put(t, attrList);
  Entry e = new Entry(entryDN, ocMap, userAttrs, opAttrs);
  e.processVirtualAttributes();
  return e;
}
private void encodeV2Attributes( ByteStringBuilder buffer, Map<AttributeType, List<Attribute>> attributes, EntryEncodeConfig config) throws DirectoryException { int numAttributes = 0; // First count how many attributes are there to encode. for (List<Attribute> attrList : attributes.values()) { for (Attribute a : attrList) { if (a.isVirtual() || a.isEmpty()) { continue; } numAttributes++; } } // Encoded one-to-five byte number of attributes buffer.appendBERLength(numAttributes); if (config.compressAttributeDescriptions()) { for (List<Attribute> attrList : attributes.values()) { for (Attribute a : attrList) { if (a.isVirtual() || a.isEmpty()) { continue; } ByteStringBuilder bsb = new ByteStringBuilder(); config.getCompressedSchema().encodeAttribute(bsb, a); buffer.appendBERLength(bsb.length()); buffer.append(bsb); } } } else { // The attributes will be encoded as a sequence of: // - A UTF-8 byte representation of the attribute name. // - A zero delimiter // - A one-to-five byte number of values for the attribute // - A sequence of: // - A one-to-five byte length for the value // - A UTF-8 byte representation for the value for (List<Attribute> attrList : attributes.values()) { for (Attribute a : attrList) { byte[] nameBytes = getBytes(a.getNameWithOptions()); buffer.append(nameBytes); buffer.append((byte) 0x00); buffer.appendBERLength(a.size()); for (AttributeValue v : a) { buffer.appendBERLength(v.getValue().length()); buffer.append(v.getValue()); } } } } }
/**
 * Appends a reader for every value of the named template attribute to the chain.
 * Does nothing when the template has no such attribute.
 *
 * @param reader the chain to append value readers to
 * @param attribute the template attribute name to expand
 * @throws IOException if obtaining a value reader fails
 */
private void expandAttribute(ChainReader reader, String attribute) throws IOException {
  if (getTemplate().hasAttribute(attribute)) {
    for (AttributeValue value : getTemplate().getAttributeValues(attribute)) {
      reader.addReader(value.getReader());
    }
  }
}
/**
 * Builds the full type declaration string: the base type, followed by every
 * non-null left-bound attribute in slot order, followed by the default value.
 *
 * @param leftBindAttrs attributes bound to ordered slots (sparse; may hold nulls)
 * @return the assembled type declaration
 */
public String getType(ResizableArray<AttributeValue> leftBindAttrs) {
  StringBuilder decl = new StringBuilder();
  decl.append(get());
  for (int slot = 0; slot < leftBindAttrs.size(); slot++) {
    AttributeValue bound = leftBindAttrs.get(slot);
    // Sparse array: empty slots are null and simply skipped.
    if (bound != null) {
      decl.append(" ").append(bound.get());
    }
  }
  addDefaultValue(decl);
  return decl.toString();
}
/** * Run type checks on the attributes; expression must return a string which we will use as a sort * key */ public Type typeCheck(SymbolTable stable) throws TypeCheckError { final Type tselect = _select.typeCheck(stable); // If the sort data-type is not set we use the natural data-type // of the data we will sort if (!(tselect instanceof StringType)) { _select = new CastExpr(_select, Type.String); } _order.typeCheck(stable); _caseOrder.typeCheck(stable); _dataType.typeCheck(stable); return Type.Void; }
/**
 * A utility method which may be used by implementations in order to obtain the value of the
 * specified attribute from the provided entry as a time in generalized time format.
 *
 * @param entry The entry whose attribute is to be parsed as a generalized time.
 * @param attributeType The attribute type whose value should be parsed as a generalized time
 *     value.
 * @return The requested time, or -1 if it could not be determined.
 * @throws DirectoryException If a problem occurs while attempting to decode the value as a
 *     generalized time.
 */
protected static final long getGeneralizedTime(
    final Entry entry, final AttributeType attributeType) throws DirectoryException {
  long timeValue = -1;
  final List<Attribute> attrList = entry.getAttribute(attributeType);
  if (attrList != null) {
    for (final Attribute a : attrList) {
      if (a.isEmpty()) {
        continue;
      }
      // Only the first value of the first non-empty attribute is used.
      final AttributeValue v = a.iterator().next();
      try {
        timeValue = GeneralizedTimeSyntax.decodeGeneralizedTimeValue(v.getNormalizedValue());
      } catch (final Exception e) {
        if (debugEnabled()) {
          TRACER.debugCaught(DebugLogLevel.ERROR, e);
          TRACER.debugWarning(
              "Unable to decode value %s for attribute %s " + "in user entry %s: %s",
              v.getValue().toString(),
              attributeType.getNameOrOID(),
              entry.getDN().toString(),
              stackTraceToSingleLineString(e));
        }
        final Message message =
            ERR_PWPSTATE_CANNOT_DECODE_GENERALIZED_TIME.get(
                v.getValue().toString(),
                attributeType.getNameOrOID(),
                entry.getDN().toString(),
                String.valueOf(e));
        throw new DirectoryException(ResultCode.INVALID_ATTRIBUTE_SYNTAX, message, e);
      }
      break;
    }
  }
  if (timeValue == -1) {
    if (debugEnabled()) {
      TRACER.debugInfo(
          "Returning -1 because attribute %s does not " + "exist in user entry %s",
          attributeType.getNameOrOID(), entry.getDN().toString());
    }
  }
  // FIXME: else to be consistent...
  return timeValue;
}
/**
 * Computes either the header entropy (delegating to
 * {@link #calculateHeaderEntropy(Map)}) or, when {@code withVariance} is set, the
 * product of each attribute value's fraction of the total record count.
 *
 * @param attributeValues the attribute values keyed by attribute index
 * @param withVariance when false, compute plain header entropy; when true, compute
 *     the variance-style product measure
 * @return the computed result
 * @throws MLException if the underlying data access fails
 */
public BigDecimal calculateHeaderEntropy(
    Map<Integer, AttributeValue> attributeValues, boolean withVariance) throws MLException {
  if (!withVariance) {
    return calculateHeaderEntropy(attributeValues);
  } else {
    BigDecimal headerEntropyresult = new BigDecimal(1);
    for (AttributeValue attributeValue : attributeValues.values()) {
      BigDecimal fraction =
          BigDecimal.valueOf(attributeValue.getAttributeValueCount())
              .divide(new BigDecimal(size()), globalMathContext);
      // BUG FIX: BigDecimal is immutable, so the result of multiply() must be
      // reassigned; the original discarded it and always returned 1.
      headerEntropyresult = headerEntropyresult.multiply(fraction, globalMathContext);
    }
    return headerEntropyresult;
  }
}
/**
 * Computes the Shannon entropy (log base 2) of the attribute-value distribution,
 * where each value's probability is its count divided by the total record count.
 *
 * @param attributeValues the attribute values keyed by attribute index
 * @return the header entropy, always non-negative for valid probabilities
 * @throws MLException if the underlying data access fails
 */
public BigDecimal calculateHeaderEntropy(Map<Integer, AttributeValue> attributeValues)
    throws MLException {
  BigDecimal sum = new BigDecimal(0);
  for (AttributeValue value : attributeValues.values()) {
    // p = count / size, then log2(p) via the natural-log change of base.
    BigDecimal p =
        BigDecimal.valueOf(value.getAttributeValueCount())
            .divide(new BigDecimal(size()), globalMathContext);
    BigDecimal log2p =
        BigDecimal.valueOf(Math.log(p.doubleValue()))
            .divide(new BigDecimal(Math.log(2)), globalMathContext);
    sum = sum.add(p.multiply(log2p, globalMathContext));
  }
  // Entropy is -sum(p * log2 p).
  return sum.negate();
}
/**
 * Decorates the given {@code attributeValueMap} with the attributes required for geo spatial
 * querying (a geohash column and a geohash-key column per configuration).
 *
 * @param attributeValueMap the item attributes to be decorated with geo attributes
 * @param latitude the latitude that needs to be attached with the item
 * @param longitude the longitude that needs to be attached with the item
 * @param configs the collection of configurations to be used for decorating the request with geo
 *     attributes
 */
public void updateAttributeValues(
    Map<String, AttributeValue> attributeValueMap,
    double latitude,
    double longitude,
    List<GeoConfig> configs) {
  if (configs == null) {
    throw new IllegalArgumentException("Geo configs should not be null");
  }
  for (GeoConfig config : configs) {
    // Fail-fast if any of the preconditions fail.
    checkConfigParams(
        config.getGeoIndexName(),
        config.getGeoHashKeyColumn(),
        config.getGeoHashColumn(),
        config.getGeoHashKeyLength());
    // NOTE(review): the geohash depends only on latitude/longitude, so this call
    // looks loop-invariant — confirm generateGeohash is side-effect free before
    // hoisting it out of the loop.
    long geohash = s2Manager.generateGeohash(latitude, longitude);
    long geoHashKey = s2Manager.generateHashKey(geohash, config.getGeoHashKeyLength());
    // Decorate the request with the geohash.
    AttributeValue geoHashValue = new AttributeValue().withN(Long.toString(geohash));
    attributeValueMap.put(config.getGeoHashColumn(), geoHashValue);
    AttributeValue geoHashKeyValue;
    if (config.getHashKeyDecorator().isPresent()
        && config.getCompositeHashKeyColumn().isPresent()) {
      AttributeValue compositeHashKeyValue =
          attributeValueMap.get(config.getCompositeHashKeyColumn().get());
      // Skip this config when the composite column is absent from the item.
      if (compositeHashKeyValue == null) {
        continue;
      }
      String compositeColumnValue = compositeHashKeyValue.getS();
      String hashKey =
          config.getHashKeyDecorator().get().decorate(compositeColumnValue, geoHashKey);
      // Decorate the request with the composite geoHashKey (type String).
      geoHashKeyValue = new AttributeValue().withS(String.valueOf(hashKey));
    } else {
      // Decorate the request with the geoHashKey (type Number).
      geoHashKeyValue = new AttributeValue().withN(String.valueOf(geoHashKey));
    }
    attributeValueMap.put(config.getGeoHashKeyColumn(), geoHashKeyValue);
  }
}
/** Parse the attributes of the xsl:sort element */ public void parseContents(Parser parser) { final SyntaxTreeNode parent = getParent(); if (!(parent instanceof ApplyTemplates) && !(parent instanceof ForEach)) { reportError(this, parser, ErrorMsg.STRAY_SORT_ERR, null); return; } // Parse the select expression (node string value if no expression) _select = parser.parseExpression(this, "select", "string(.)"); // Get the sort order; default is 'ascending' String val = getAttribute("order"); if (val.length() == 0) val = "ascending"; _order = AttributeValue.create(this, val, parser); // Get the sort data type; default is text val = getAttribute("data-type"); if (val.length() == 0) { try { final Type type = _select.typeCheck(parser.getSymbolTable()); if (type instanceof IntType) val = "number"; else val = "text"; } catch (TypeCheckError e) { val = "text"; } } _dataType = AttributeValue.create(this, val, parser); _lang = getAttribute("lang"); // bug! see 26869 // val = getAttribute("lang"); // _lang = AttributeValue.create(this, val, parser); // Get the case order; default is language dependant val = getAttribute("case-order"); _caseOrder = AttributeValue.create(this, val, parser); }
/** {@inheritDoc} */
@Override()
public boolean hasValue(Entry entry, VirtualAttributeRule rule, AttributeValue value) {
  final Backend backend = DirectoryServer.getBackend(entry.getDN());
  try {
    final ConditionResult hasSubs = backend.hasSubordinates(entry.getDN());
    // An unknown or undefined answer from the backend never matches.
    if (hasSubs == null || hasSubs == ConditionResult.UNDEFINED) {
      return false;
    }
    // The candidate value's normalized form must name the same condition result.
    return ConditionResult.valueOf(value.getNormalizedValue().toString()).equals(hasSubs);
  } catch (DirectoryException de) {
    if (debugEnabled()) {
      TRACER.debugCaught(DebugLogLevel.ERROR, de);
    }
    return false;
  }
}
/**
 * Add an enforced attribute to a tag. An enforced attribute will always be added to the element.
 * If the element already has the attribute set, it will be overridden.
 *
 * <p>E.g.: <code>addEnforcedAttribute("a", "rel", "nofollow")</code> will make all <code>a</code>
 * tags output as <code>&lt;a href="..." rel="nofollow"&gt;</code>
 *
 * @param tag The tag the enforced attribute is for. The tag will be added to the allowed tag list
 *     if necessary.
 * @param key The attribute key
 * @param value The enforced attribute value
 * @return this (for chaining)
 */
public Whitelist addEnforcedAttribute(String tag, String key, String value) {
  Validate.notEmpty(tag);
  Validate.notEmpty(key);
  Validate.notEmpty(value);
  TagName tagName = TagName.valueOf(tag);
  // Enforcing an attribute implies the tag itself is allowed.
  if (!tagNames.contains(tagName)) {
    tagNames.add(tagName);
  }
  AttributeKey attrKey = AttributeKey.valueOf(key);
  AttributeValue attrVal = AttributeValue.valueOf(value);
  // Lazily create the per-tag attribute map on first use.
  Map<AttributeKey, AttributeValue> attrMap = enforcedAttributes.get(tagName);
  if (attrMap == null) {
    attrMap = new HashMap<AttributeKey, AttributeValue>();
    enforcedAttributes.put(tagName, attrMap);
  }
  attrMap.put(attrKey, attrVal);
  return this;
}
/**
 * Parses the processing-instruction element: the "name" attribute is required,
 * must be a valid NCName when literal, and may never be "xml".
 */
public void parseContents(Parser parser) {
  final String name = getAttribute("name");
  if (name.length() == 0) {
    reportError(this, parser, ErrorMsg.REQUIRED_ATTR_ERR, "name");
  } else {
    _isLiteral = Util.isLiteral(name);
    // A literal name can be validated at compile time; AVTs are checked at runtime.
    if (_isLiteral && !XML11Char.isXML11ValidNCName(name)) {
      ErrorMsg err = new ErrorMsg(ErrorMsg.INVALID_NCNAME_ERR, name, this);
      parser.reportError(Constants.ERROR, err);
    }
    _name = AttributeValue.create(this, name, parser);
  }
  // The PI target "xml" is reserved.
  if (name.equals("xml")) {
    reportError(this, parser, ErrorMsg.ILLEGAL_PI_ERR, "xml");
  }
  parseChildren(parser);
}
/**
 * Builds a column declaration for the given data type: applicable attributes with
 * an explicit order are bound into ordered slots consumed by the type string, and
 * the remaining applied attributes are appended afterwards.
 *
 * @param dt the column's data type
 * @param attrs candidate attribute values (may be null)
 * @return the assembled column declaration
 */
private static ColumnDecl buildDecl(DataType dt, List<AttributeValue> attrs) {
  List<AttributeValue> applied = new ArrayList<AttributeValue>();
  ResizableArray<AttributeValue> leftBind = new ResizableArray<AttributeValue>();
  if (attrs != null) {
    for (AttributeValue candidate : attrs) {
      if (candidate.canApply(dt, applied)) {
        // Ordered attributes are slotted for the type string itself.
        int order = candidate.getAttribute().getType().getOrder();
        if (order > -1) {
          leftBind.set(order, candidate);
        }
        applied.add(candidate);
      }
    }
  }
  StringBuilder decl = new StringBuilder();
  decl.append(dt.getType(leftBind));
  // Unordered applied attributes trail the type string.
  for (AttributeValue av : applied) {
    if (!av.isOrdered()) {
      decl.append(" ").append(av.get());
    }
  }
  return new ColumnDecl(dt, applied, decl.toString());
}
/**
 * Type-checks the processing instruction: first its name AVT, then its contents.
 * Always resolves to Void since a PI produces no value on the expression stack.
 */
public Type typeCheck(SymbolTable stable) throws TypeCheckError {
  _name.typeCheck(stable);
  typeCheckContents(stable);
  return Type.Void;
}
/**
 * Marshalls a BatchGetItemRequest into a JSON POST request for the
 * DynamoDB_20111205.BatchGetItem target.
 *
 * <p>The previously duplicated serialization of hash and range key elements is
 * factored into {@link #writeAttributeValueJson}.
 *
 * @param batchGetItemRequest the request to marshall; must not be null
 * @return the populated HTTP request
 * @throws AmazonClientException when the argument is null or JSON serialization fails
 */
public Request<BatchGetItemRequest> marshall(BatchGetItemRequest batchGetItemRequest) {
  if (batchGetItemRequest == null) {
    throw new AmazonClientException("Invalid argument passed to marshall(...)");
  }
  Request<BatchGetItemRequest> request =
      new DefaultRequest<BatchGetItemRequest>(batchGetItemRequest, "AmazonDynamoDB");
  String target = "DynamoDB_20111205.BatchGetItem";
  request.addHeader("X-Amz-Target", target);
  request.addHeader("Content-Type", "application/x-amz-json-1.0");
  request.setHttpMethod(HttpMethodName.POST);
  // Generated boilerplate: the resource path is always empty for this API, but the
  // query-string handling is kept for parity with the code generator's template.
  String uriResourcePath = "";
  uriResourcePath = uriResourcePath.replaceAll("//", "/");
  if (uriResourcePath.contains("?")) {
    String queryString = uriResourcePath.substring(uriResourcePath.indexOf("?") + 1);
    uriResourcePath = uriResourcePath.substring(0, uriResourcePath.indexOf("?"));
    for (String s : queryString.split("[;&]")) {
      String[] nameValuePair = s.split("=");
      if (nameValuePair.length == 2) {
        request.addParameter(nameValuePair[0], nameValuePair[1]);
      } else {
        request.addParameter(s, null);
      }
    }
  }
  request.setResourcePath(uriResourcePath);
  try {
    StringWriter stringWriter = new StringWriter();
    JSONWriter jsonWriter = new JSONWriter(stringWriter);
    jsonWriter.object();
    if (batchGetItemRequest.getRequestItems() != null) {
      jsonWriter.key("RequestItems");
      jsonWriter.object();
      for (Map.Entry<String, KeysAndAttributes> requestItemsListValue :
          batchGetItemRequest.getRequestItems().entrySet()) {
        if (requestItemsListValue.getValue() != null) {
          jsonWriter.key(requestItemsListValue.getKey());
          jsonWriter.object();
          java.util.List<Key> keysList = requestItemsListValue.getValue().getKeys();
          if (keysList != null && keysList.size() > 0) {
            jsonWriter.key("Keys");
            jsonWriter.array();
            for (Key keysListValue : keysList) {
              if (keysListValue != null) {
                jsonWriter.object();
                AttributeValue hashKeyElement = keysListValue.getHashKeyElement();
                if (hashKeyElement != null) {
                  jsonWriter.key("HashKeyElement");
                  writeAttributeValueJson(jsonWriter, hashKeyElement);
                }
                AttributeValue rangeKeyElement = keysListValue.getRangeKeyElement();
                if (rangeKeyElement != null) {
                  jsonWriter.key("RangeKeyElement");
                  writeAttributeValueJson(jsonWriter, rangeKeyElement);
                }
                jsonWriter.endObject();
              }
            }
            jsonWriter.endArray();
          }
          java.util.List<String> attributesToGetList =
              requestItemsListValue.getValue().getAttributesToGet();
          if (attributesToGetList != null && attributesToGetList.size() > 0) {
            jsonWriter.key("AttributesToGet");
            jsonWriter.array();
            for (String attributesToGetListValue : attributesToGetList) {
              if (attributesToGetListValue != null) {
                jsonWriter.value(attributesToGetListValue);
              }
            }
            jsonWriter.endArray();
          }
          if (requestItemsListValue.getValue().isConsistentRead() != null) {
            jsonWriter
                .key("ConsistentRead")
                .value(requestItemsListValue.getValue().isConsistentRead());
          }
          jsonWriter.endObject();
        }
      }
      jsonWriter.endObject();
    }
    jsonWriter.endObject();
    String snippet = stringWriter.toString();
    byte[] content = snippet.getBytes("UTF-8");
    request.setContent(new StringInputStream(snippet));
    request.addHeader("Content-Length", Integer.toString(content.length));
  } catch (Throwable t) {
    throw new AmazonClientException("Unable to marshall request to JSON: " + t.getMessage(), t);
  }
  return request;
}

/**
 * Serializes a single AttributeValue as a JSON object with its S/N/B scalar members
 * and SS/NS/BS set members. Extracted from the previously duplicated hash-key /
 * range-key serialization blocks.
 *
 * @param jsonWriter the writer positioned after the element's key
 * @param value the attribute value to serialize
 * @throws Exception if the underlying JSON writer fails
 */
private static void writeAttributeValueJson(JSONWriter jsonWriter, AttributeValue value)
    throws Exception {
  jsonWriter.object();
  if (value.getS() != null) {
    jsonWriter.key("S").value(value.getS());
  }
  if (value.getN() != null) {
    jsonWriter.key("N").value(value.getN());
  }
  if (value.getB() != null) {
    jsonWriter.key("B").value(value.getB());
  }
  java.util.List<String> sSList = value.getSS();
  if (sSList != null && sSList.size() > 0) {
    jsonWriter.key("SS");
    jsonWriter.array();
    for (String sSListValue : sSList) {
      if (sSListValue != null) {
        jsonWriter.value(sSListValue);
      }
    }
    jsonWriter.endArray();
  }
  java.util.List<String> nSList = value.getNS();
  if (nSList != null && nSList.size() > 0) {
    jsonWriter.key("NS");
    jsonWriter.array();
    for (String nSListValue : nSList) {
      if (nSListValue != null) {
        jsonWriter.value(nSListValue);
      }
    }
    jsonWriter.endArray();
  }
  java.util.List<java.nio.ByteBuffer> bSList = value.getBS();
  if (bSList != null && bSList.size() > 0) {
    jsonWriter.key("BS");
    jsonWriter.array();
    for (java.nio.ByteBuffer bSListValue : bSList) {
      if (bSListValue != null) {
        jsonWriter.value(bSListValue);
      }
    }
    jsonWriter.endArray();
  }
  jsonWriter.endObject();
}
/** {@inheritDoc} */
@Override
public long numSubordinates(DN entryDN, boolean subtree) throws DirectoryException {
  // If the requested entry was null, then return undefined.
  if (entryDN == null) {
    return -1;
  }
  // If the requested entry was the backend base entry, then return the number of
  // backup directories (plus, for a subtree count, the backups inside each).
  if (backupBaseDN.equals(entryDN)) {
    long count = 0;
    for (File f : backupDirectories) {
      // Check to see if the descriptor file exists. If not, then skip this
      // backup directory.
      File descriptorFile = new File(f, BACKUP_DIRECTORY_DESCRIPTOR_FILE);
      if (!descriptorFile.exists()) {
        continue;
      }
      // If subtree is included, count the number of entries for each backup
      // directory.
      if (subtree) {
        try {
          BackupDirectory backupDirectory =
              BackupDirectory.readBackupDirectoryDescriptor(f.getPath());
          count += backupDirectory.getBackups().keySet().size();
        } catch (Exception e) {
          // Any descriptor read failure makes the count undefined.
          return -1;
        }
      }
      count++;
    }
    return count;
  }
  // See if the requested entry was one level below the backend base entry. If so,
  // then it must point to a backup directory. Otherwise, it must be two levels
  // below the backup base entry and must point to a specific backup.
  DN parentDN = entryDN.getParentDNInSuffix();
  if (parentDN == null) {
    return -1;
  } else if (backupBaseDN.equals(parentDN)) {
    // Backup directory entry: count the backups in the referenced directory.
    long count = 0;
    Entry backupDirEntry = getBackupDirectoryEntry(entryDN);
    AttributeType t = DirectoryServer.getAttributeType(ATTR_BACKUP_DIRECTORY_PATH, true);
    List<Attribute> attrList = backupDirEntry.getAttribute(t);
    if (attrList != null && !attrList.isEmpty()) {
      for (AttributeValue v : attrList.get(0)) {
        try {
          BackupDirectory backupDirectory =
              BackupDirectory.readBackupDirectoryDescriptor(v.getValue().toString());
          count += backupDirectory.getBackups().keySet().size();
        } catch (Exception e) {
          return -1;
        }
      }
    }
    return count;
  } else if (backupBaseDN.equals(parentDN.getParentDNInSuffix())) {
    // Individual backup entries are leaves.
    return 0;
  } else {
    return -1;
  }
}
/**
 * Checks to see if a LDAP modification is allowed access.
 *
 * @param container The structure containing the LDAP modifications and the evaluation context
 *     to apply the check against.
 * @param operation The operation to check modify privileges on.
 * @param skipAccessCheck True if access checking should be skipped.
 * @return True if access is allowed.
 * @throws DirectoryException If a modified ACI could not be decoded.
 */
private boolean aciCheckMods(
    AciLDAPOperationContainer container,
    LocalBackendModifyOperation operation,
    boolean skipAccessCheck)
    throws DirectoryException {
  Entry resourceEntry = container.getResourceEntry();
  DN dn = resourceEntry.getDN();
  List<Modification> modifications = operation.getModifications();
  for (Modification m : modifications) {
    Attribute modAttr = m.getAttribute();
    AttributeType modAttrType = modAttr.getAttributeType();
    if (modAttrType.equals(aciType)) {
      /*
       * Check that the operation has modify privileges if it contains
       * an "aci" attribute type.
       */
      if (!operation.getClientConnection().hasPrivilege(Privilege.MODIFY_ACL, operation)) {
        Message message =
            INFO_ACI_MODIFY_FAILED_PRIVILEGE.get(
                String.valueOf(container.getResourceDN()),
                String.valueOf(container.getClientDN()));
        logError(message);
        return false;
      }
    }
    // This access check handles the case where all attributes of this
    // type are being replaced or deleted. If only a subset is being
    // deleted than this access check is skipped.
    ModificationType modType = m.getModificationType();
    if (((modType == ModificationType.DELETE) && modAttr.isEmpty())
        || ((modType == ModificationType.REPLACE) || (modType == ModificationType.INCREMENT))) {
      /*
       * Check if we have rights to delete all values of an attribute
       * type in the resource entry.
       */
      if (resourceEntry.hasAttribute(modAttrType)) {
        container.setCurrentAttributeType(modAttrType);
        List<Attribute> attrList =
            resourceEntry.getAttribute(modAttrType, modAttr.getOptions());
        if (attrList != null) {
          // Every existing value must be deletable by the requester.
          for (Attribute a : attrList) {
            for (AttributeValue v : a) {
              container.setCurrentAttributeValue(v);
              container.setRights(ACI_WRITE_DELETE);
              if (!skipAccessCheck && !accessAllowed(container)) {
                return false;
              }
            }
          }
        }
      }
    }
    if (!modAttr.isEmpty()) {
      for (AttributeValue v : modAttr) {
        container.setCurrentAttributeType(modAttrType);
        switch (m.getModificationType()) {
          case ADD:
          case REPLACE:
            // Added/replacement values need write-add access.
            container.setCurrentAttributeValue(v);
            container.setRights(ACI_WRITE_ADD);
            if (!skipAccessCheck && !accessAllowed(container)) {
              return false;
            }
            break;
          case DELETE:
            // Explicitly deleted values need write-delete access.
            container.setCurrentAttributeValue(v);
            container.setRights(ACI_WRITE_DELETE);
            if (!skipAccessCheck && !accessAllowed(container)) {
              return false;
            }
            break;
          case INCREMENT:
            // For increments, check the post-modification values of the
            // attribute in the modified entry.
            Entry modifiedEntry = operation.getModifiedEntry();
            List<Attribute> modifiedAttrs =
                modifiedEntry.getAttribute(modAttrType, modAttr.getOptions());
            if (modifiedAttrs != null) {
              for (Attribute attr : modifiedAttrs) {
                for (AttributeValue val : attr) {
                  container.setCurrentAttributeValue(val);
                  container.setRights(ACI_WRITE_ADD);
                  if (!skipAccessCheck && !accessAllowed(container)) {
                    return false;
                  }
                }
              }
            }
            break;
        }
        /*
         * Check if the modification type has an "aci" attribute type.
         * If so, check the syntax of that attribute value. Fail the
         * the operation if the syntax check fails.
         */
        if (modAttrType.equals(aciType) || modAttrType.equals(globalAciType)) {
          try {
            // A global ACI needs a NULL DN, not the DN of the
            // modification.
            if (modAttrType.equals(globalAciType)) {
              dn = DN.nullDN();
            }
            Aci.decode(v.getValue(), dn);
          } catch (AciException ex) {
            Message message =
                WARN_ACI_MODIFY_FAILED_DECODE.get(String.valueOf(dn), ex.getMessage());
            throw new DirectoryException(ResultCode.INVALID_ATTRIBUTE_SYNTAX, message);
          }
        }
      }
    }
  }
  return true;
}
/**
 * Generates an entry for a backup based on the provided DN. The DN must have an RDN component
 * that specifies the backup ID, and the parent DN must have an RDN component that specifies the
 * backup directory.
 *
 * @param entryDN The DN of the backup entry to retrieve.
 * @return The requested backup entry.
 * @throws DirectoryException If the specified backup does not exist or is invalid.
 */
private Entry getBackupEntry(DN entryDN) throws DirectoryException {
  // First, get the backup ID from the entry DN.
  AttributeType idType = DirectoryServer.getAttributeType(ATTR_BACKUP_ID, true);
  AttributeValue idValue = entryDN.getRDN().getAttributeValue(idType);
  if (idValue == null) {
    Message message = ERR_BACKUP_NO_BACKUP_ID_IN_DN.get(String.valueOf(entryDN));
    throw new DirectoryException(ResultCode.CONSTRAINT_VIOLATION, message);
  }
  String backupID = idValue.getValue().toString();
  // Next, get the backup directory from the parent DN.
  DN parentDN = entryDN.getParentDNInSuffix();
  if (parentDN == null) {
    Message message = ERR_BACKUP_NO_BACKUP_PARENT_DN.get(String.valueOf(entryDN));
    throw new DirectoryException(ResultCode.CONSTRAINT_VIOLATION, message);
  }
  AttributeType t = DirectoryServer.getAttributeType(ATTR_BACKUP_DIRECTORY_PATH, true);
  AttributeValue v = parentDN.getRDN().getAttributeValue(t);
  if (v == null) {
    Message message = ERR_BACKUP_NO_BACKUP_DIR_IN_DN.get(String.valueOf(entryDN));
    throw new DirectoryException(ResultCode.CONSTRAINT_VIOLATION, message);
  }
  BackupDirectory backupDirectory;
  try {
    backupDirectory = BackupDirectory.readBackupDirectoryDescriptor(v.getValue().toString());
  } catch (ConfigException ce) {
    if (debugEnabled()) {
      TRACER.debugCaught(DebugLogLevel.ERROR, ce);
    }
    Message message =
        ERR_BACKUP_INVALID_BACKUP_DIRECTORY.get(String.valueOf(entryDN), ce.getMessageObject());
    throw new DirectoryException(ResultCode.CONSTRAINT_VIOLATION, message);
  } catch (Exception e) {
    if (debugEnabled()) {
      TRACER.debugCaught(DebugLogLevel.ERROR, e);
    }
    Message message = ERR_BACKUP_ERROR_GETTING_BACKUP_DIRECTORY.get(getExceptionMessage(e));
    throw new DirectoryException(DirectoryServer.getServerErrorResultCode(), message);
  }
  BackupInfo backupInfo = backupDirectory.getBackupInfo(backupID);
  if (backupInfo == null) {
    Message message = ERR_BACKUP_NO_SUCH_BACKUP.get(backupID, backupDirectory.getPath());
    throw new DirectoryException(ResultCode.NO_SUCH_OBJECT, message, parentDN, null);
  }
  // Construct the backup entry to return: object classes, then one attribute
  // per piece of backup metadata that is present.
  LinkedHashMap<ObjectClass, String> ocMap = new LinkedHashMap<ObjectClass, String>(3);
  ocMap.put(DirectoryServer.getTopObjectClass(), OC_TOP);
  ObjectClass oc = DirectoryServer.getObjectClass(OC_BACKUP_INFO, true);
  ocMap.put(oc, OC_BACKUP_INFO);
  oc = DirectoryServer.getObjectClass(OC_EXTENSIBLE_OBJECT_LC, true);
  ocMap.put(oc, OC_EXTENSIBLE_OBJECT);
  LinkedHashMap<AttributeType, List<Attribute>> opAttrs =
      new LinkedHashMap<AttributeType, List<Attribute>>(0);
  LinkedHashMap<AttributeType, List<Attribute>> userAttrs =
      new LinkedHashMap<AttributeType, List<Attribute>>();
  ArrayList<Attribute> attrList = new ArrayList<Attribute>(1);
  attrList.add(Attributes.create(idType, idValue));
  userAttrs.put(idType, attrList);
  // NOTE(review): the return value of getBackupDirectory() is discarded — this
  // call looks like dead code; confirm the getter has no side effects and remove.
  backupInfo.getBackupDirectory();
  attrList = new ArrayList<Attribute>(1);
  attrList.add(Attributes.create(t, v));
  userAttrs.put(t, attrList);
  Date backupDate = backupInfo.getBackupDate();
  if (backupDate != null) {
    t = DirectoryServer.getAttributeType(ATTR_BACKUP_DATE, true);
    attrList = new ArrayList<Attribute>(1);
    attrList.add(
        Attributes.create(
            t, AttributeValues.create(t, GeneralizedTimeSyntax.format(backupDate))));
    userAttrs.put(t, attrList);
  }
  t = DirectoryServer.getAttributeType(ATTR_BACKUP_COMPRESSED, true);
  attrList = new ArrayList<Attribute>(1);
  attrList.add(
      Attributes.create(t, BooleanSyntax.createBooleanValue(backupInfo.isCompressed())));
  userAttrs.put(t, attrList);
  t = DirectoryServer.getAttributeType(ATTR_BACKUP_ENCRYPTED, true);
  attrList = new ArrayList<Attribute>(1);
  attrList.add(
      Attributes.create(t, BooleanSyntax.createBooleanValue(backupInfo.isEncrypted())));
  userAttrs.put(t, attrList);
  t = DirectoryServer.getAttributeType(ATTR_BACKUP_INCREMENTAL, true);
  attrList = new ArrayList<Attribute>(1);
  attrList.add(
      Attributes.create(t, BooleanSyntax.createBooleanValue(backupInfo.isIncremental())));
  userAttrs.put(t, attrList);
  HashSet<String> dependencies = backupInfo.getDependencies();
  if (dependencies != null && !dependencies.isEmpty()) {
    t = DirectoryServer.getAttributeType(ATTR_BACKUP_DEPENDENCY, true);
    AttributeBuilder builder = new AttributeBuilder(t);
    for (String s : dependencies) {
      builder.add(AttributeValues.create(t, s));
    }
    attrList = new ArrayList<Attribute>(1);
    attrList.add(builder.toAttribute());
    userAttrs.put(t, attrList);
  }
  byte[] signedHash = backupInfo.getSignedHash();
  if (signedHash != null) {
    t = DirectoryServer.getAttributeType(ATTR_BACKUP_SIGNED_HASH, true);
    attrList = new ArrayList<Attribute>(1);
    attrList.add(Attributes.create(t, AttributeValues.create(t, ByteString.wrap(signedHash))));
    userAttrs.put(t, attrList);
  }
  byte[] unsignedHash = backupInfo.getUnsignedHash();
  if (unsignedHash != null) {
    t = DirectoryServer.getAttributeType(ATTR_BACKUP_UNSIGNED_HASH, true);
    attrList = new ArrayList<Attribute>(1);
    attrList.add(Attributes.create(t, AttributeValues.create(t, ByteString.wrap(unsignedHash))));
    userAttrs.put(t, attrList);
  }
  // Arbitrary backup properties become attributes keyed by their lowercased name.
  HashMap<String, String> properties = backupInfo.getBackupProperties();
  if (properties != null && !properties.isEmpty()) {
    for (Map.Entry<String, String> e : properties.entrySet()) {
      t = DirectoryServer.getAttributeType(toLowerCase(e.getKey()), true);
      attrList = new ArrayList<Attribute>(1);
      attrList.add(Attributes.create(t, AttributeValues.create(t, e.getValue())));
      userAttrs.put(t, attrList);
    }
  }
  Entry e = new Entry(entryDN, ocMap, userAttrs, opAttrs);
  e.processVirtualAttributes();
  return e;
}
/**
 * Translates the case-order argument of this sort by delegating to the
 * {@code _caseOrder} expression node's own translate method.
 *
 * @param classGen  the generator for the class being compiled
 * @param methodGen the generator for the method being compiled
 */
public void translateCaseOrder(ClassGenerator classGen, MethodGenerator methodGen) { _caseOrder.translate(classGen, methodGen); }
/**
 * Translates the sort-order argument of this sort by delegating to the
 * {@code _order} expression node's own translate method.
 *
 * @param classGen  the generator for the class being compiled
 * @param methodGen the generator for the method being compiled
 */
public void translateSortOrder(ClassGenerator classGen, MethodGenerator methodGen) { _order.translate(classGen, methodGen); }
/**
 * Translates the data-type argument of this sort by delegating to the
 * {@code _dataType} expression node's own translate method.
 *
 * <p>This method and {@code translateSortOrder} are needed by the static
 * methods that compile the overloaded {@code NodeSortRecord.compareType()}
 * and {@code NodeSortRecord.sortOrder()} methods.</p>
 *
 * @param classGen  the generator for the class being compiled
 * @param methodGen the generator for the method being compiled
 */
public void translateSortType(ClassGenerator classGen, MethodGenerator methodGen) { _dataType.translate(classGen, methodGen); }
/**
 * {@inheritDoc}
 *
 * <p>Processes a search against the backup backend.  The base DN determines
 * which of three cases applies: (1) the backup base entry itself, under which
 * all known backup directories (and, for subtree scope, their individual
 * backups) are enumerated; (2) a backup directory entry, whose child backup
 * entries are enumerated for non-base scopes; or (3) a single backup entry
 * two levels below the base.  Entries matching the search filter are returned
 * via {@code searchOperation.returnEntry}.</p>
 */
@Override
public void search(SearchOperation searchOperation) throws DirectoryException {
  // Get the base entry for the search, if possible.  If it doesn't exist,
  // then this will throw an exception.
  DN baseDN = searchOperation.getBaseDN();
  Entry baseEntry = getEntry(baseDN);

  // Look at the base DN and see if it's the backup base DN, a backup
  // directory entry DN, or a backup entry DN.
  DN parentDN;
  SearchScope scope = searchOperation.getScope();
  SearchFilter filter = searchOperation.getFilter();
  if (backupBaseDN.equals(baseDN)) {
    // Case 1: the search is based at the backup base entry.
    if ((scope == SearchScope.BASE_OBJECT || scope == SearchScope.WHOLE_SUBTREE)
        && filter.matchesEntry(baseEntry)) {
      searchOperation.returnEntry(baseEntry, null);
    }

    // For non-base scopes, enumerate the configured backup directories.
    if (scope != SearchScope.BASE_OBJECT && !backupDirectories.isEmpty()) {
      AttributeType backupPathType =
          DirectoryServer.getAttributeType(ATTR_BACKUP_DIRECTORY_PATH, true);
      for (File f : backupDirectories) {
        // Check to see if the descriptor file exists.  If not, then skip this
        // backup directory.
        File descriptorFile = new File(f, BACKUP_DIRECTORY_DESCRIPTOR_FILE);
        if (!descriptorFile.exists()) {
          continue;
        }

        DN backupDirDN = makeChildDN(backupBaseDN, backupPathType, f.getAbsolutePath());

        Entry backupDirEntry;
        try {
          backupDirEntry = getBackupDirectoryEntry(backupDirDN);
        } catch (Exception e) {
          // Best-effort enumeration: an unreadable backup directory is logged
          // (when debugging) and skipped rather than failing the search.
          if (debugEnabled()) {
            TRACER.debugCaught(DebugLogLevel.ERROR, e);
          }
          continue;
        }

        if (filter.matchesEntry(backupDirEntry)) {
          searchOperation.returnEntry(backupDirEntry, null);
        }

        // For subtree scope, also enumerate the individual backups held in
        // each directory (keyed by backup ID in the directory descriptor).
        if (scope != SearchScope.SINGLE_LEVEL) {
          List<Attribute> attrList = backupDirEntry.getAttribute(backupPathType);
          if (attrList != null && !attrList.isEmpty()) {
            for (AttributeValue v : attrList.get(0)) {
              try {
                BackupDirectory backupDirectory =
                    BackupDirectory.readBackupDirectoryDescriptor(v.getValue().toString());
                AttributeType idType = DirectoryServer.getAttributeType(ATTR_BACKUP_ID, true);
                for (String backupID : backupDirectory.getBackups().keySet()) {
                  DN backupEntryDN = makeChildDN(backupDirDN, idType, backupID);
                  Entry backupEntry = getBackupEntry(backupEntryDN);
                  if (filter.matchesEntry(backupEntry)) {
                    searchOperation.returnEntry(backupEntry, null);
                  }
                }
              } catch (Exception e) {
                // Again best-effort: an invalid descriptor value is skipped.
                if (debugEnabled()) {
                  TRACER.debugCaught(DebugLogLevel.ERROR, e);
                }
                continue;
              }
            }
          }
        }
      }
    }
  }
  // Case 2: the base is a backup directory entry (its parent is the backup
  // base DN).  Note the assignment inside the condition: parentDN is also
  // needed by the final else branch below.
  else if (backupBaseDN.equals(parentDN = baseDN.getParentDNInSuffix())) {
    Entry backupDirEntry = getBackupDirectoryEntry(baseDN);

    if ((scope == SearchScope.BASE_OBJECT || scope == SearchScope.WHOLE_SUBTREE)
        && filter.matchesEntry(backupDirEntry)) {
      searchOperation.returnEntry(backupDirEntry, null);
    }

    // For non-base scopes, enumerate the backups within this directory.
    if (scope != SearchScope.BASE_OBJECT) {
      AttributeType t = DirectoryServer.getAttributeType(ATTR_BACKUP_DIRECTORY_PATH, true);
      List<Attribute> attrList = backupDirEntry.getAttribute(t);
      if (attrList != null && !attrList.isEmpty()) {
        for (AttributeValue v : attrList.get(0)) {
          try {
            BackupDirectory backupDirectory =
                BackupDirectory.readBackupDirectoryDescriptor(v.getValue().toString());
            AttributeType idType = DirectoryServer.getAttributeType(ATTR_BACKUP_ID, true);
            for (String backupID : backupDirectory.getBackups().keySet()) {
              DN backupEntryDN = makeChildDN(baseDN, idType, backupID);
              Entry backupEntry = getBackupEntry(backupEntryDN);
              if (filter.matchesEntry(backupEntry)) {
                searchOperation.returnEntry(backupEntry, null);
              }
            }
          } catch (Exception e) {
            // Best-effort: skip descriptor values that cannot be read.
            if (debugEnabled()) {
              TRACER.debugCaught(DebugLogLevel.ERROR, e);
            }
            continue;
          }
        }
      }
    }
  }
  // Case 3: the base must be a single backup entry, i.e. its grandparent is
  // the backup base DN; anything else is outside this backend's tree.
  else {
    if (parentDN == null || !backupBaseDN.equals(parentDN.getParentDNInSuffix())) {
      Message message = ERR_BACKUP_NO_SUCH_ENTRY.get(String.valueOf(backupBaseDN));
      throw new DirectoryException(ResultCode.NO_SUCH_OBJECT, message);
    }

    if (scope == SearchScope.BASE_OBJECT || scope == SearchScope.WHOLE_SUBTREE) {
      Entry backupEntry = getBackupEntry(baseDN);
      if (backupEntry == null) {
        Message message = ERR_BACKUP_NO_SUCH_ENTRY.get(String.valueOf(backupBaseDN));
        throw new DirectoryException(ResultCode.NO_SUCH_OBJECT, message);
      }

      if (filter.matchesEntry(backupEntry)) {
        searchOperation.returnEntry(backupEntry, null);
      }
    }
  }
}