private void writeIndexes(XMLExtendedStreamWriter writer, ModelNode repository) throws XMLStreamException { if (has(repository, ModelKeys.INDEX)) { writer.writeStartElement(Element.INDEXES.getLocalName()); ModelNode providerNode = repository.get(ModelKeys.INDEX); for (Property index : providerNode.asPropertyList()) { writer.writeStartElement(Element.INDEX.getLocalName()); writer.writeAttribute(Attribute.NAME.getLocalName(), index.getName()); ModelNode prop = index.getValue(); ModelAttributes.PROVIDER_NAME.marshallAsAttribute(prop, writer); ModelAttributes.INDEX_KIND.marshallAsAttribute(prop, writer); ModelAttributes.SYNCHRONOUS.marshallAsAttribute(prop, writer); ModelAttributes.NODE_TYPE_NAME.marshallAsAttribute(prop, writer); ModelAttributes.INDEX_COLUMNS.marshallAsAttribute(prop, writer); // Write out the extra properties ... if (has(prop, ModelKeys.PROPERTIES)) { ModelNode properties = prop.get(ModelKeys.PROPERTIES); for (Property property : properties.asPropertyList()) { writer.writeAttribute(property.getName(), property.getValue().asString()); } } writer.writeEndElement(); } writer.writeEndElement(); } }
@Override public void writeContent(XMLExtendedStreamWriter writer, SubsystemMarshallingContext context) throws XMLStreamException { context.startSubsystemElement(CURRENT.getUriString(), false); final ModelNode node = context.getModelNode(); if (node.hasDefined(HORNETQ_SERVER)) { final ModelNode servers = node.get(HORNETQ_SERVER); boolean first = true; for (Property prop : servers.asPropertyList()) { writeHornetQServer(writer, prop.getName(), prop.getValue()); if (!first) { writeNewLine(writer); } else { first = false; } } } if (node.hasDefined(JMS_BRIDGE)) { final ModelNode jmsBridges = node.get(JMS_BRIDGE); boolean first = true; for (Property prop : jmsBridges.asPropertyList()) { writeJmsBridge(writer, prop.getName(), prop.getValue()); if (!first) { writeNewLine(writer); } else { first = false; } } } writer.writeEndElement(); }
private static void writeConnectorServices(XMLExtendedStreamWriter writer, ModelNode node) throws XMLStreamException { if (!node.isDefined()) { return; } List<Property> properties = node.asPropertyList(); if (!properties.isEmpty()) { writer.writeStartElement(Element.CONNECTOR_SERVICES.getLocalName()); for (final Property property : properties) { writer.writeStartElement(Element.CONNECTOR_SERVICE.getLocalName()); writer.writeAttribute(Attribute.NAME.getLocalName(), property.getName()); final ModelNode service = property.getValue(); for (AttributeDefinition attribute : ConnectorServiceDefinition.ATTRIBUTES) { attribute.marshallAsElement(service, writer); } // TODO use a custom attribute marshaller if (service.hasDefined(CommonAttributes.PARAM)) { for (Property param : service.get(CommonAttributes.PARAM).asPropertyList()) { writer.writeEmptyElement(Element.PARAM.getLocalName()); writer.writeAttribute(Attribute.KEY.getLocalName(), param.getName()); writer.writeAttribute( Attribute.VALUE.getLocalName(), param.getValue().get(ConnectorServiceParamDefinition.VALUE.getName()).asString()); } } writer.writeEndElement(); } writer.writeEndElement(); writeNewLine(writer); } }
/* * Tests read and write access to the mixed-keyed JDBC store attributes of a distributed cache */ @SuppressWarnings("deprecation") @Test public void testDistributedCacheMixedJDBCStoreReadWriteOperation() throws Exception { ModelNode stringKeyedTable = createStringKeyedTable(); // Parse and install the XML into the controller String subsystemXml = getSubsystemXml(); KernelServices servicesA = this.createKernelServicesBuilder().setSubsystemXml(subsystemXml).build(); // read the distributed cache mixed-keyed-jdbc-store data-source attribute ModelNode result = servicesA.executeOperation( getMixedKeyedJDBCCacheStoreReadOperation( "maximal", DistributedCacheResourceDefinition.WILDCARD_PATH.getKey(), "dist", JDBCStoreResourceDefinition.Attribute.DATA_SOURCE)); Assert.assertEquals(result.toString(), SUCCESS, result.get(OUTCOME).asString()); Assert.assertEquals("ExampleDS", result.get(RESULT).asString()); // write the data-source attribute result = servicesA.executeOperation( getMixedKeyedJDBCCacheStoreWriteOperation( "maximal", DistributedCacheResourceDefinition.WILDCARD_PATH.getKey(), "dist", JDBCStoreResourceDefinition.Attribute.DATA_SOURCE, "new-datasource")); Assert.assertEquals(result.toString(), SUCCESS, result.get(OUTCOME).asString()); // re-read the data-source attribute result = servicesA.executeOperation( getMixedKeyedJDBCCacheStoreReadOperation( "maximal", DistributedCacheResourceDefinition.WILDCARD_PATH.getKey(), "dist", JDBCStoreResourceDefinition.Attribute.DATA_SOURCE)); Assert.assertEquals(result.toString(), SUCCESS, result.get(OUTCOME).asString()); Assert.assertEquals("new-datasource", result.get(RESULT).asString()); // read the string-keyed-table attribute result = servicesA.executeOperation( getMixedKeyedJDBCCacheStoreReadOperation( "maximal", DistributedCacheResourceDefinition.WILDCARD_PATH.getKey(), "dist", MixedKeyedJDBCStoreResourceDefinition.DeprecatedAttribute.STRING_TABLE)); Assert.assertEquals(result.toString(), SUCCESS, result.get(OUTCOME).asString()); Assert.assertEquals( stringKeyedTable.asPropertyList().size(), result.get(RESULT).asPropertyList().size()); for (Property property : stringKeyedTable.asPropertyList()) { Assert.assertTrue(result.get(RESULT).hasDefined(property.getName())); Assert.assertEquals(property.getValue(), result.get(RESULT).get(property.getName())); } }
// For any request params that are of type BYTES, replace the file path with the bytes from the // file private boolean replaceFilePathsWithBytes(ModelNode request) throws CommandFormatException, IOException { boolean didReplacement = false; ModelNode opDesc = new ModelNode(); opDesc.get("address").set(request.get("address")); opDesc.get("operation").set("read-operation-description"); final String opName = request.get("operation").asString(); opDesc.get("name").set(opName); ModelNode response = execute(opDesc, false).getResponseNode(); if (response.hasDefined("result", "request-properties")) { final ModelNode requestProps = response.get("result", "request-properties"); for (Property prop : requestProps.asPropertyList()) { ModelNode typeDesc = prop.getValue().get("type"); if (typeDesc.getType() == ModelType.TYPE && typeDesc.asType() == ModelType.BYTES && request.hasDefined(prop.getName())) { String filePath = request.get(prop.getName()).asString(); File localFile = new File(filePath); if (!localFile.exists()) continue; try { request.get(prop.getName()).set(Util.readBytes(localFile)); didReplacement = true; } catch (OperationFormatException e) { throw new CommandFormatException(e); } } } } return didReplacement; }
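// Hedged illustration (not part of the original sources): the effect replaceFilePathsWithBytes
// has on a request whose request-property is described as ModelType.BYTES. The operation and
// property names below are chosen for illustration only; just the org.jboss.dmr.ModelNode API
// and java.nio.file are assumed.
private static void replaceFilePathExample() throws java.io.IOException {
    org.jboss.dmr.ModelNode request = new org.jboss.dmr.ModelNode();
    request.get("operation").set("upload-deployment-bytes");
    request.get("bytes").set("/tmp/app.war"); // the user supplied a local file path
    // replaceFilePathsWithBytes would swap the path for the file contents:
    byte[] contents = java.nio.file.Files.readAllBytes(java.nio.file.Paths.get(request.get("bytes").asString()));
    request.get("bytes").set(contents); // the node now carries the raw bytes, ready to send to the server
}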
private void writeIndexProviders(XMLExtendedStreamWriter writer, ModelNode repository) throws XMLStreamException { if (has(repository, ModelKeys.INDEX_PROVIDER)) { writer.writeStartElement(Element.INDEX_PROVIDERS.getLocalName()); ModelNode providerNode = repository.get(ModelKeys.INDEX_PROVIDER); for (Property provider : providerNode.asPropertyList()) { writer.writeStartElement(Element.INDEX_PROVIDER.getLocalName()); writer.writeAttribute(Attribute.NAME.getLocalName(), provider.getName()); ModelNode prop = provider.getValue(); ModelAttributes.CLASSNAME.marshallAsAttribute(prop, writer); ModelAttributes.MODULE.marshallAsAttribute(prop, writer); ModelAttributes.RELATIVE_TO.marshallAsAttribute(prop, writer); ModelAttributes.PATH.marshallAsAttribute(prop, writer); // Write out the extra properties ... if (has(prop, ModelKeys.PROPERTIES)) { ModelNode properties = prop.get(ModelKeys.PROPERTIES); for (Property property : properties.asPropertyList()) { writer.writeAttribute(property.getName(), property.getValue().asString()); } } writer.writeEndElement(); } writer.writeEndElement(); } }
private void writeTextExtraction(XMLExtendedStreamWriter writer, ModelNode repository) throws XMLStreamException { if (has(repository, ModelKeys.TEXT_EXTRACTOR)) { writer.writeStartElement(Element.TEXT_EXTRACTORS.getLocalName()); if (repository.hasDefined(ModelKeys.TEXT_EXTRACTORS_THREAD_POOL_NAME)) { writer.writeAttribute( Attribute.THREAD_POOL_NAME.getLocalName(), repository.get(ModelKeys.TEXT_EXTRACTORS_THREAD_POOL_NAME).asString()); } if (repository.hasDefined(ModelKeys.TEXT_EXTRACTORS_MAX_POOL_SIZE)) { writer.writeAttribute( Attribute.MAX_POOL_SIZE.getLocalName(), repository.get(ModelKeys.TEXT_EXTRACTORS_MAX_POOL_SIZE).asString()); } for (Property extractor : repository.get(ModelKeys.TEXT_EXTRACTOR).asPropertyList()) { writer.writeStartElement(Element.TEXT_EXTRACTOR.getLocalName()); writer.writeAttribute(Attribute.NAME.getLocalName(), extractor.getName()); ModelNode prop = extractor.getValue(); ModelAttributes.TEXT_EXTRACTOR_CLASSNAME.marshallAsAttribute(prop, writer); ModelAttributes.MODULE.marshallAsAttribute(prop, writer); // Write out the extra properties ... if (has(prop, ModelKeys.PROPERTIES)) { ModelNode properties = prop.get(ModelKeys.PROPERTIES); for (Property property : properties.asPropertyList()) { writer.writeAttribute(property.getName(), property.getValue().asString()); } } writer.writeEndElement(); } writer.writeEndElement(); } }
public void addDeploymentOverlays(final List<ModelNode> updates) { if (domainModel.hasDefined(DEPLOYMENT_OVERLAY)) { HostFileRepository remoteRepository = null; if (!domainController.getLocalHostInfo().isMasterDomainController()) { remoteRepository = domainController.getRemoteFileRepository(); } for (Property deploymentOverlay : domainModel.get(DEPLOYMENT_OVERLAY).asPropertyList()) { String name = deploymentOverlay.getName(); ModelNode details = deploymentOverlay.getValue(); PathAddress addr = PathAddress.pathAddress(PathElement.pathElement(DEPLOYMENT_OVERLAY, name)); ModelNode addOp = Util.getEmptyOperation(ADD, addr.toModelNode()); updates.add(addOp); if (details.hasDefined(CONTENT)) { for (Property content : details.get(CONTENT).asPropertyList()) { final String contentName = content.getName(); final ModelNode contentDetails = content.getValue(); byte[] hash = contentDetails.require(CONTENT).asBytes(); File[] files = domainController.getLocalFileRepository().getDeploymentFiles(hash); if (files == null || files.length == 0) { if (remoteRepository != null) { remoteRepository.getDeploymentFiles(hash); } } addr = PathAddress.pathAddress( PathElement.pathElement(DEPLOYMENT_OVERLAY, name), PathElement.pathElement(CONTENT, contentName)); addOp = Util.getEmptyOperation(ADD, addr.toModelNode()); addOp.get(CONTENT).get(HASH).set(contentDetails.get(CONTENT)); updates.add(addOp); } } if (serverGroup.hasDefined(DEPLOYMENT_OVERLAY)) { final ModelNode groupOverlay = serverGroup.get(DEPLOYMENT_OVERLAY).asObject(); if (groupOverlay.has(name)) { ModelNode deploymentsNode = groupOverlay.get(name); if (deploymentsNode.has(DEPLOYMENT)) { for (Property content : deploymentsNode.get(DEPLOYMENT).asPropertyList()) { final String deploymentName = content.getName(); final ModelNode deploymentDetails = content.getValue(); addr = PathAddress.pathAddress( PathElement.pathElement(DEPLOYMENT_OVERLAY, name), PathElement.pathElement(DEPLOYMENT, deploymentName)); addOp = Util.getEmptyOperation(ADD, addr.toModelNode()); updates.add(addOp); } } } } } } }
/** {@inheritDoc} */ @Override public void writeContent(XMLExtendedStreamWriter writer, SubsystemMarshallingContext context) throws XMLStreamException { context.startSubsystemElement(Namespace.CURRENT.getUriString(), false); final ModelNode model = context.getModelNode(); writeWorkerThreadPoolIfAttributesSet(writer, model); if (model.hasDefined(CONNECTOR)) { final ModelNode connector = model.get(CONNECTOR); for (String name : connector.keys()) { writeConnector(writer, connector.require(name), name); } } if (model.hasDefined(OUTBOUND_CONNECTION) || model.hasDefined(REMOTE_OUTBOUND_CONNECTION) || model.hasDefined(LOCAL_OUTBOUND_CONNECTION)) { // write <outbound-connections> element writer.writeStartElement(Element.OUTBOUND_CONNECTIONS.getLocalName()); if (model.hasDefined(OUTBOUND_CONNECTION)) { final List<Property> outboundConnections = model.get(OUTBOUND_CONNECTION).asPropertyList(); for (Property property : outboundConnections) { final String connectionName = property.getName(); // get the specific outbound-connection final ModelNode genericOutboundConnectionModel = property.getValue(); // process and write outbound connection this.writeOutboundConnection(writer, connectionName, genericOutboundConnectionModel); } } if (model.hasDefined(REMOTE_OUTBOUND_CONNECTION)) { final List<Property> remoteOutboundConnections = model.get(REMOTE_OUTBOUND_CONNECTION).asPropertyList(); for (Property property : remoteOutboundConnections) { final String connectionName = property.getName(); // get the specific remote outbound connection final ModelNode remoteOutboundConnectionModel = property.getValue(); // process and write remote outbound connection this.writeRemoteOutboundConnection(writer, connectionName, remoteOutboundConnectionModel); } } if (model.hasDefined(LOCAL_OUTBOUND_CONNECTION)) { final List<Property> localOutboundConnections = model.get(LOCAL_OUTBOUND_CONNECTION).asPropertyList(); for (Property property : localOutboundConnections) { final String connectionName = property.getName(); // get the specific local outbound connection final ModelNode localOutboundConnectionModel = property.getValue(); // process and write local outbound connection this.writeLocalOutboundConnection(writer, connectionName, localOutboundConnectionModel); } } // </outbound-connections> writer.writeEndElement(); } writer.writeEndElement(); }
private void addSchemaLocations(Map<String, ModelNode> map, ModelNode namespaces) { if (namespaces.isDefined()) { for (Property prop : namespaces.asPropertyList()) { map.put( prop.getName(), SchemaLocationAddHandler.getAddSchemaLocationOperation( EMPTY, prop.getName(), prop.getValue().asString())); } } }
/** * Compares two models to make sure that they are the same * * @param node1 the first model * @param node2 the second model * @throws AssertionFailedError if the models were not the same */ protected void compare(ModelNode node1, ModelNode node2) { Assert.assertEquals(getCompareStackAsString() + " types", node1.getType(), node2.getType()); if (node1.getType() == ModelType.OBJECT) { final Set<String> keys1 = node1.keys(); final Set<String> keys2 = node2.keys(); Assert.assertEquals(node1 + "\n" + node2, keys1.size(), keys2.size()); for (String key : keys1) { final ModelNode child1 = node1.get(key); Assert.assertTrue("Missing: " + key + "\n" + node1 + "\n" + node2, node2.has(key)); final ModelNode child2 = node2.get(key); if (child1.isDefined()) { Assert.assertTrue(child1.toString(), child2.isDefined()); stack.get().push(key + "/"); compare(child1, child2); stack.get().pop(); } else { Assert.assertFalse(child2.asString(), child2.isDefined()); } } } else if (node1.getType() == ModelType.LIST) { List<ModelNode> list1 = node1.asList(); List<ModelNode> list2 = node2.asList(); Assert.assertEquals(list1 + "\n" + list2, list1.size(), list2.size()); for (int i = 0; i < list1.size(); i++) { stack.get().push(i + "/"); compare(list1.get(i), list2.get(i)); stack.get().pop(); } } else if (node1.getType() == ModelType.PROPERTY) { Property prop1 = node1.asProperty(); Property prop2 = node2.asProperty(); Assert.assertEquals(prop1 + "\n" + prop2, prop1.getName(), prop2.getName()); stack.get().push(prop1.getName() + "/"); compare(prop1.getValue(), prop2.getValue()); stack.get().pop(); } else { try { Assert.assertEquals( getCompareStackAsString() + "\n\"" + node1.asString() + "\"\n\"" + node2.asString() + "\"\n-----", node2.asString().trim(), node1.asString().trim()); } catch (AssertionFailedError error) { throw error; } } }
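// Hedged usage sketch (not from the original test class): compare(...) is typically used to
// check that marshalling a subsystem model to XML and parsing it back yields an identical model
// tree. The KernelServices calls below mirror the builder usage in the test earlier in this
// section; readWholeModel() and getPersistedSubsystemXml() are assumed to be available on the
// test harness.
@Test
public void testMarshalledXmlProducesSameModel() throws Exception {
    KernelServices servicesA = createKernelServicesBuilder().setSubsystemXml(getSubsystemXml()).build();
    ModelNode modelA = servicesA.readWholeModel();
    KernelServices servicesB = createKernelServicesBuilder().setSubsystemXml(servicesA.getPersistedSubsystemXml()).build();
    ModelNode modelB = servicesB.readWholeModel();
    compare(modelA, modelB); // recursive structural comparison defined above
}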
private void writeSequencing(XMLExtendedStreamWriter writer, ModelNode repository) throws XMLStreamException { if (has(repository, ModelKeys.SEQUENCER)) { writer.writeStartElement(Element.SEQUENCERS.getLocalName()); if (repository.hasDefined(ModelKeys.SEQUENCERS_THREAD_POOL_NAME)) { writer.writeAttribute( Attribute.THREAD_POOL_NAME.getLocalName(), repository.get(ModelKeys.SEQUENCERS_THREAD_POOL_NAME).asString()); } if (repository.hasDefined(ModelKeys.SEQUENCERS_MAX_POOL_SIZE)) { writer.writeAttribute( Attribute.MAX_POOL_SIZE.getLocalName(), repository.get(ModelKeys.SEQUENCERS_MAX_POOL_SIZE).asString()); } ModelNode sequencerNode = repository.get(ModelKeys.SEQUENCER); for (Property sequencer : sequencerNode.asPropertyList()) { writer.writeStartElement(Element.SEQUENCER.getLocalName()); writer.writeAttribute(Attribute.NAME.getLocalName(), sequencer.getName()); ModelNode prop = sequencer.getValue(); ModelAttributes.SEQUENCER_CLASSNAME.marshallAsAttribute(prop, writer); ModelAttributes.MODULE.marshallAsAttribute(prop, writer); // Write out the extra properties ... if (has(prop, ModelKeys.PROPERTIES)) { ModelNode properties = prop.get(ModelKeys.PROPERTIES); for (Property property : properties.asPropertyList()) { writer.writeAttribute(property.getName(), property.getValue().asString()); } } if (has(prop, ModelKeys.PATH_EXPRESSIONS)) { List<ModelNode> pathExpressions = prop.get(ModelKeys.PATH_EXPRESSIONS).asList(); switch (pathExpressions.size()) { case 0: break; case 1: ModelNode pathExpression = pathExpressions.iterator().next(); writer.writeAttribute( Attribute.PATH_EXPRESSION.getLocalName(), pathExpression.asString()); break; default: for (ModelNode pathExpr : pathExpressions) { writer.writeStartElement(Element.PATH_EXPRESSION.getLocalName()); writer.writeCharacters(pathExpr.asString()); writer.writeEndElement(); } } } writer.writeEndElement(); } writer.writeEndElement(); } }
public void persist(XMLExtendedStreamWriter writer, ModelNode model, String namespaceURI) throws XMLStreamException { boolean wildcard = getPathElement().isWildcard(); model = wildcard ? model.get(getPathElement().getKey()) : model.get(getPathElement().getKeyValuePair()); if (!model.isDefined() && !useValueAsElementName()) { return; } boolean isSubsystem = getPathElement().getKey().equals(ModelDescriptionConstants.SUBSYSTEM); boolean writeWrapper = getXmlWrapperElement() != null; if (writeWrapper) { writeStartElement(writer, namespaceURI, getXmlWrapperElement()); } if (wildcard) { for (Property p : model.asPropertyList()) { if (useValueAsElementName()) { writeStartElement(writer, namespaceURI, p.getName()); } else { writeStartElement(writer, namespaceURI, getXmlElementName()); writer.writeAttribute(NAME, p.getName()); } for (AttributeDefinition def : getAttributes()) { def.getAttributeMarshaller().marshallAsAttribute(def, p.getValue(), false, writer); } persistChildren(writer, p.getValue()); writer.writeEndElement(); } } else { if (useValueAsElementName()) { writeStartElement(writer, namespaceURI, getPathElement().getValue()); } else if (isSubsystem) { startSubsystemElement(writer, namespaceURI, getChildren().isEmpty()); } else { writeStartElement(writer, namespaceURI, getXmlElementName()); } for (AttributeDefinition def : getAttributes()) { def.getAttributeMarshaller().marshallAsAttribute(def, model, false, writer); } persistChildren(writer, model); writer.writeEndElement(); } if (writeWrapper) { writer.writeEndElement(); } }
public void execute(OperationContext context, ModelNode operation) throws OperationFailedException { final ModelNode add = createEmptyAddOperation(); final ModelNode model = context.readModel(PathAddress.EMPTY_ADDRESS); if (model.hasDefined(BEAN_VALIDATION_ENABLED)) { add.get(BEAN_VALIDATION_ENABLED).set(model.get(BEAN_VALIDATION_ENABLED)); } if (model.hasDefined(ARCHIVE_VALIDATION_ENABLED)) { add.get(ARCHIVE_VALIDATION_ENABLED).set(model.get(ARCHIVE_VALIDATION_ENABLED)); } if (model.hasDefined(ARCHIVE_VALIDATION_FAIL_ON_ERROR)) { add.get(ARCHIVE_VALIDATION_FAIL_ON_ERROR).set(model.get(ARCHIVE_VALIDATION_FAIL_ON_ERROR)); } if (model.hasDefined(ARCHIVE_VALIDATION_FAIL_ON_WARN)) { add.get(ARCHIVE_VALIDATION_FAIL_ON_WARN).set(model.get(ARCHIVE_VALIDATION_FAIL_ON_WARN)); } if (model.hasDefined(CACHED_CONNECTION_MANAGER_DEBUG)) { add.get(CACHED_CONNECTION_MANAGER_DEBUG).set(model.get(CACHED_CONNECTION_MANAGER_DEBUG)); } if (model.hasDefined(CACHED_CONNECTION_MANAGER_ERROR)) { add.get(CACHED_CONNECTION_MANAGER_ERROR).set(model.get(CACHED_CONNECTION_MANAGER_ERROR)); } final ModelNode result = context.getResult(); result.add(add); if (model.hasDefined(THREAD_POOL)) { ModelNode pools = model.get(THREAD_POOL); for (Property poolProp : pools.asPropertyList()) { if (poolProp.getName().equals(LONG_RUNNING_THREADS)) { addBoundedQueueThreadPool( result, poolProp.getValue(), PathElement.pathElement(ModelDescriptionConstants.SUBSYSTEM, SUBSYSTEM_NAME), PathElement.pathElement(THREAD_POOL, LONG_RUNNING_THREADS)); } else if (poolProp.getName().equals(SHORT_RUNNING_THREADS)) { addBoundedQueueThreadPool( result, poolProp.getValue(), PathElement.pathElement(ModelDescriptionConstants.SUBSYSTEM, SUBSYSTEM_NAME), PathElement.pathElement(THREAD_POOL, SHORT_RUNNING_THREADS)); } } } context.completeStep(); }
@Override public void persist(XMLExtendedStreamWriter writer, ModelNode model) throws XMLStreamException { boolean wildcard = getPathElement().isWildcard(); model = wildcard ? model.get(getPathElement().getKey()) : model.get(getPathElement().getKeyValuePair()); if (!model.isDefined()) { return; } boolean writeWrapper = getXmlWrapperElement() != null; if (writeWrapper) { writer.writeStartElement(getXmlWrapperElement()); } if (wildcard) { for (Property p : model.asPropertyList()) { writer.writeStartElement(getXmlElementName()); writer.writeAttribute(NAME, p.getName()); for (AttributeDefinition def : getAttributes()) { def.getAttributeMarshaller().marshallAsAttribute(def, p.getValue(), false, writer); } persistChildren(writer, p.getValue()); writer.writeEndElement(); } } else { writer.writeStartElement(getXmlElementName()); for (AttributeDefinition def : getAttributes()) { def.getAttributeMarshaller().marshallAsAttribute(def, model, false, writer); } persistChildren(writer, model); writer.writeEndElement(); } if (writeWrapper) { writer.writeEndElement(); } }
private void setProps(ModelNode requestProperties) throws Exception { props = new TreeSet<RequestProp>(); if (opName.equals("add")) { UserObject usrObj = (UserObject) node.getUserObject(); props.add( new RequestProp( "/" + usrObj.getName() + "=<name>/", "Resource name for the new " + usrObj.getName(), true, ModelType.STRING)); } if (opName.equals("write-attribute") && node.isLeaf()) { ModelNode nameNode = requestProperties.get("name"); nameNode .get("type") .set(ModelType.UNDEFINED); // undefined type will display as uneditable String UserObject usrObj = (UserObject) node.getUserObject(); ModelNode nameNodeValue = new ModelNode(); nameNodeValue.set(usrObj.getName()); props.add(new RequestProp("name", requestProperties.get("name"), nameNodeValue)); ModelNode rscDesc = cliGuiCtx.getExecutor().doCommand(node.addressPath() + ":read-resource-description"); ModelNode valueNode = rscDesc.get("result", "attributes", usrObj.getName()); valueNode.get("required").set(false); // value is never required for write-attribute ModelNode valueNodeValue = usrObj.getBackingNode().get(usrObj.getName()); props.add(new RequestProp("value", valueNode, valueNodeValue)); return; } for (Property prop : requestProperties.asPropertyList()) { props.add(new RequestProp(prop.getName(), prop.getValue(), null)); } }
private void addInterfaces(Map<String, ModelNode> map, ModelNode iface) { if (iface.isDefined()) { for (Property prop : iface.asPropertyList()) { map.put(prop.getName(), prop.getValue()); } } }
static MailSessionConfig from(final OperationContext operationContext, final ModelNode model) throws OperationFailedException { MailSessionConfig cfg = new MailSessionConfig(); cfg.setJndiName( MailSessionDefinition.JNDI_NAME.resolveModelAttribute(operationContext, model).asString()); cfg.setDebug( MailSessionDefinition.DEBUG.resolveModelAttribute(operationContext, model).asBoolean()); if (MailSessionDefinition.FROM.resolveModelAttribute(operationContext, model).isDefined()) { cfg.setFrom( MailSessionDefinition.FROM.resolveModelAttribute(operationContext, model).asString()); } if (model.hasDefined(SERVER_TYPE)) { ModelNode server = model.get(SERVER_TYPE); if (server.hasDefined(SMTP)) { cfg.setSmtpServer(readServerConfig(operationContext, server.get(SMTP))); } if (server.hasDefined(POP3)) { cfg.setPop3Server(readServerConfig(operationContext, server.get(POP3))); } if (server.hasDefined(IMAP)) { cfg.setImapServer(readServerConfig(operationContext, server.get(IMAP))); } } if (model.hasDefined(CUSTOM)) { for (Property server : model.get(CUSTOM).asPropertyList()) { cfg.addCustomServer( readCustomServerConfig(server.getName(), operationContext, server.getValue())); } } return cfg; }
static ModelNode parseCustomLoadMetric(XMLExtendedStreamReader reader) throws XMLStreamException { final ModelNode load = new ModelNode(); final int count = reader.getAttributeCount(); for (int i = 0; i < count; i++) { requireNoNamespaceAttribute(reader, i); final String value = reader.getAttributeValue(i); final Attribute attribute = Attribute.forName(reader.getAttributeLocalName(i)); switch (attribute) { case CAPACITY: load.get(CAPACITY).set(value); break; case WEIGHT: load.get(WEIGHT).set(value); break; case CLASS: load.get(CLASS).set(value); break; default: unexpectedAttribute(reader, i); } } while (reader.hasNext() && reader.nextTag() != END_ELEMENT) { final Element element = Element.forName(reader.getLocalName()); switch (element) { case PROPERTY: final Property property = parseProperty(reader); load.get("property").add(property.getName(), property.getValue()); break; default: unexpectedElement(reader); } } return load; }
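// Illustrative input (an assumption based on the attribute and element constants handled above,
// not taken from the subsystem schema itself): parseCustomLoadMetric accepts an element of
// roughly this shape, where the class name and the property are hypothetical.
//
// <custom-load-metric class="com.example.MyLoadMetric" capacity="1.0" weight="2">
//     <property name="history" value="10"/>
// </custom-load-metric>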
void launchServices( final OperationContext context, final PathAddress pathAddress, final ModelNode model, final ServiceVerificationHandler verificationHandler, final List<ServiceController<?>> newControllers) throws OperationFailedException { Handler newHandler = new Handler(); newHandler.setClazz( HandlerResourceDefinition.CLASS.resolveModelAttribute(context, model).asString()); ModelNode handler = Resource.Tools.readModel(context.readResourceFromRoot(pathAddress)); if (handler.hasDefined(COMMON_HANDLER_PARAMETER.getName())) { for (ModelNode handlerParameter : handler.get(COMMON_HANDLER_PARAMETER.getName()).asList()) { Property property = handlerParameter.asProperty(); String paramName = property.getName(); String paramValue = HandlerParameterResourceDefinition.VALUE .resolveModelAttribute(context, property.getValue()) .asString(); KeyValueType kv = new KeyValueType(); kv.setKey(paramName); kv.setValue(paramValue); newHandler.add(kv); } } SAMLHandlerService service = new SAMLHandlerService(newHandler); PathElement providerAlias = pathAddress.subAddress(0, pathAddress.size() - 1).getLastElement(); ServiceTarget serviceTarget = context.getServiceTarget(); ServiceBuilder<SAMLHandlerService> serviceBuilder = serviceTarget.addService( createServiceName(providerAlias.getValue(), newHandler.getClazz()), service); ServiceName serviceName; if (providerAlias.getKey().equals(IDENTITY_PROVIDER.getName())) { serviceName = IdentityProviderService.createServiceName(providerAlias.getValue()); } else { serviceName = ServiceProviderService.createServiceName(providerAlias.getValue()); } serviceBuilder.addDependency( serviceName, EntityProviderService.class, service.getEntityProviderService()); ServiceController<SAMLHandlerService> controller = serviceBuilder .addListener(verificationHandler) .setInitialMode(ServiceController.Mode.PASSIVE) .install(); if (newControllers != null) { newControllers.add(controller); } }
/* Property logic */ static void writeProperty(final XMLExtendedStreamWriter writer, Property property) throws XMLStreamException { writer.writeStartElement(Element.PROPERTY.getLocalName()); writer.writeAttribute(NAME, property.getName()); writer.writeAttribute(VALUE, property.getValue().asString()); writer.writeEndElement(); }
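// Hedged illustration (not part of the original sources): assuming the NAME and VALUE constants
// resolve to "name" and "value", a call such as
// writeProperty(writer, new Property("cluster-name", new ModelNode().set("my-cluster")))
// emits <property name="cluster-name" value="my-cluster"/> (or an explicit open/close pair,
// depending on how the underlying stream writer renders an element with no content).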
private static ModelFixer createModelFixer(ModelVersion version) { return (ModelNode model) -> { if (InfinispanModel.VERSION_4_1_0.requiresTransformation(version)) { final ModelNode maximal = model.get("cache-container", "maximal"); maximal .asPropertyList() .stream() .filter( caches -> caches.getName().equals("distributed-cache") || caches.getName().equals("replicated-cache")) .forEach( p -> { ModelNode caches = maximal.get(p.getName()); final List<Property> cachesModel = caches.asPropertyList(); for (Property cacheName : cachesModel) { final ModelNode cache = caches.get(cacheName.getName()); if (cache.hasDefined("component")) { cache.get("component", "backups").set(new ModelNode()); } } }); } if (InfinispanModel.VERSION_4_0_0.requiresTransformation(version)) { // Fix the legacy model to expect new default values applied in // StateTransferResourceDefinition#buildTransformation Arrays.asList("cache-with-string-keyed-store", "cache-with-binary-keyed-store") .forEach( cacheName -> { ModelNode cache = model.get("cache-container", "maximal", "replicated-cache", cacheName); assertFalse( cache.hasDefined( StateTransferResourceDefinition.LEGACY_PATH.getKeyValuePair())); ModelNode stateTransfer = cache.get(StateTransferResourceDefinition.LEGACY_PATH.getKeyValuePair()); stateTransfer .get( StateTransferResourceDefinition.Attribute.CHUNK_SIZE .getDefinition() .getName()) .set( StateTransferResourceDefinition.Attribute.CHUNK_SIZE .getDefinition() .getDefaultValue()); stateTransfer .get( StateTransferResourceDefinition.Attribute.TIMEOUT .getDefinition() .getName()) .set( StateTransferResourceDefinition.Attribute.TIMEOUT .getDefinition() .getDefaultValue()); }); } return model; }; }
private OpenMBeanParameterInfo[] getParameterInfos(ModelNode opNode) { if (!opNode.hasDefined(REQUEST_PROPERTIES)) { return EMPTY_PARAMETERS; } List<OpenMBeanParameterInfo> params = new ArrayList<OpenMBeanParameterInfo>(); for (Property prop : opNode.get(REQUEST_PROPERTIES).asPropertyList()) { ModelNode value = prop.getValue(); final String paramName = NameConverter.convertToCamelCase(prop.getName()); Map<String, String> descriptions = new HashMap<String, String>(); descriptions.put( DESC_EXPRESSIONS_ALLOWED, String.valueOf( prop.getValue().hasDefined(EXPRESSIONS_ALLOWED) && prop.getValue().get(EXPRESSIONS_ALLOWED).asBoolean())); params.add( new OpenMBeanParameterInfoSupport( paramName, getDescription(prop.getValue()), converters.convertToMBeanType(value), new ImmutableDescriptor(descriptions))); } return params.toArray(new OpenMBeanParameterInfo[params.size()]); }
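// Note (an assumption about NameConverter, which is only referenced here): convertToCamelCase is
// expected to turn dash-separated management names into JMX-friendly ones, so a request-property
// named "socket-binding" would surface as an OpenMBean parameter called "socketBinding".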
private void addThreadFactories(final ModelNode result, final ModelNode model) { if (model.hasDefined(THREAD_FACTORY)) { ModelNode pools = model.get(THREAD_FACTORY); for (Property poolProp : pools.asPropertyList()) { final ModelNode operation = Util.getEmptyOperation( ADD, pathAddress( PathElement.pathElement(SUBSYSTEM, SUBSYSTEM_NAME), PathElement.pathElement(THREAD_FACTORY, poolProp.getName()))); final ModelNode pool = poolProp.getValue(); operation.get(NAME).set(pool.require(NAME)); if (pool.hasDefined(GROUP_NAME)) { operation.get(GROUP_NAME).set(pool.get(GROUP_NAME)); } if (pool.hasDefined(THREAD_NAME_PATTERN)) { operation.get(THREAD_NAME_PATTERN).set(pool.get(THREAD_NAME_PATTERN)); } if (pool.hasDefined(PRIORITY)) { operation.get(PRIORITY).set(pool.get(PRIORITY)); } if (pool.hasDefined(PROPERTIES)) { operation.get(PROPERTIES).set(pool.get(PROPERTIES)); } result.add(operation); } } }
private void addUnboundedQueueThreadPools(final ModelNode result, final ModelNode model) { if (model.hasDefined(UNBOUNDED_QUEUE_THREAD_POOL)) { ModelNode pools = model.get(UNBOUNDED_QUEUE_THREAD_POOL); for (Property poolProp : pools.asPropertyList()) { final ModelNode operation = Util.getEmptyOperation( ADD, pathAddress( PathElement.pathElement(SUBSYSTEM, SUBSYSTEM_NAME), PathElement.pathElement(UNBOUNDED_QUEUE_THREAD_POOL, poolProp.getName()))); final ModelNode pool = poolProp.getValue(); operation.get(NAME).set(pool.require(NAME)); if (pool.hasDefined(THREAD_FACTORY)) { operation.get(THREAD_FACTORY).set(pool.get(THREAD_FACTORY)); } if (pool.hasDefined(PROPERTIES)) { operation.get(PROPERTIES).set(pool.get(PROPERTIES)); } if (pool.hasDefined(MAX_THREADS)) { operation.get(MAX_THREADS).set(pool.get(MAX_THREADS)); } if (pool.hasDefined(KEEPALIVE_TIME)) { operation.get(KEEPALIVE_TIME).set(pool.get(KEEPALIVE_TIME)); } result.add(operation); } } }
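// Hedged illustration (not from the original sources): the kind of "add" operation the method
// above appends to the describe result for a pool named "default". The subsystem name in the
// address depends on SUBSYSTEM_NAME and is shown as a placeholder; only the DMR text notation
// is assumed.
//
// {
//     "operation" => "add",
//     "address" => [
//         ("subsystem" => "<subsystem-name>"),
//         ("unbounded-queue-thread-pool" => "default")
//     ],
//     "name" => "default",
//     "max-threads" => 10
// }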
private void parseServerProfile( final XMLExtendedStreamReader reader, final ModelNode address, final List<ModelNode> list) throws XMLStreamException { // Attributes requireNoAttributes(reader); // Content final Set<String> configuredSubsystemTypes = new HashSet<String>(); while (reader.hasNext() && reader.nextTag() != END_ELEMENT) { if (Element.forName(reader.getLocalName()) != Element.SUBSYSTEM) { throw unexpectedElement(reader); } if (!configuredSubsystemTypes.add(reader.getNamespaceURI())) { throw AppClientLogger.ROOT_LOGGER.duplicateSubsystemDeclaration(reader.getLocation()); } // parse subsystem final List<ModelNode> subsystems = new ArrayList<ModelNode>(); reader.handleAny(subsystems); // Process subsystems for (final ModelNode update : subsystems) { // Process relative subsystem path address final ModelNode subsystemAddress = address.clone(); for (final Property path : update.get(OP_ADDR).asPropertyList()) { subsystemAddress.add(path.getName(), path.getValue().asString()); } update.get(OP_ADDR).set(subsystemAddress); list.add(update); } } }
private void addPaths(Map<String, ModelNode> map, ModelNode path) { if (path.isDefined()) { for (Property prop : path.asPropertyList()) { // TODO merge rather than replace existing? map.put(prop.getName(), prop.getValue()); } } }
private Map<String, ModelNode> createMapIndexedByKey(ModelNode node) { Map<String, ModelNode> map = new HashMap<String, ModelNode>(); if (!node.isDefined()) { return map; } for (Property prop : node.asPropertyList()) { map.put(prop.getName(), prop.getValue()); } return map; }
private Map<String, ModelNode> getSortedEntryMap(ModelNode parent, String name) { if (!parent.hasDefined(name)) { return Collections.emptyMap(); } Map<String, ModelNode> sorted = new TreeMap<String, ModelNode>(); for (Property prop : parent.get(name).asPropertyList()) { sorted.put(prop.getName(), prop.getValue()); } return sorted; }
static void addConnectorServiceConfigs(final Configuration configuration, final ModelNode model) throws OperationFailedException { if (model.hasDefined(CommonAttributes.CONNECTOR_SERVICE)) { final List<ConnectorServiceConfiguration> configs = configuration.getConnectorServiceConfigurations(); for (Property prop : model.get(CommonAttributes.CONNECTOR_SERVICE).asPropertyList()) { configs.add(createConnectorServiceConfiguration(prop.getName(), prop.getValue())); } } }