public abstract class HibernateTestSupport {

    private final ILogger logger = Logger.getLogger(getClass());

    @BeforeClass
    @AfterClass
    public static void tearUpAndDown() {
        Hazelcast.shutdownAll();
    }

    @After
    public final void cleanup() {
        Hazelcast.shutdownAll();
    }

    protected void sleep(int seconds) {
        try {
            Thread.sleep(1000 * seconds);
        } catch (InterruptedException e) {
            logger.warning("", e);
        }
    }

    protected static SessionFactory createSessionFactory(Properties props) {
        Configuration conf = new Configuration();
        URL xml = HibernateTestSupport.class.getClassLoader().getResource("test-hibernate.cfg.xml");
        props.put(CacheEnvironment.EXPLICIT_VERSION_CHECK, "true");
        conf.addProperties(props);
        conf.configure(xml);
        final SessionFactory sf = conf.buildSessionFactory();
        sf.getStatistics().setStatisticsEnabled(true);
        return sf;
    }
}
public static int calculateVersion(String version) {
    if (null == version) {
        return UNKNOWN_HAZELCAST_VERSION;
    }
    Matcher matcher = VERSION_PATTERN.matcher(version);
    if (matcher.matches()) {
        try {
            int calculatedVersion = MAJOR_VERSION_MULTIPLIER * Integer.parseInt(matcher.group(1))
                    + MINOR_VERSION_MULTIPLIER * Integer.parseInt(matcher.group(2));
            int groupCount = matcher.groupCount();
            if (groupCount >= PATCH_GROUP_COUNT) {
                String patchVersionString = matcher.group(PATCH_GROUP_COUNT);
                if (null != patchVersionString && !patchVersionString.startsWith("-")) {
                    calculatedVersion += Integer.parseInt(patchVersionString);
                }
            }
            return calculatedVersion;
        } catch (Exception e) {
            Logger.getLogger(BuildInfo.class)
                    .warning("Failed to calculate version using version string " + version, e);
        }
    }
    return UNKNOWN_HAZELCAST_VERSION;
}
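A minimal usage sketch for the version calculation above. It assumes the constants not shown in the snippet take their usual values (MAJOR_VERSION_MULTIPLIER = 10000, MINOR_VERSION_MULTIPLIER = 100) and that the method is exposed as the public static BuildInfo.calculateVersion; the expected outputs in the comments hold only under those assumptions.

import com.hazelcast.instance.BuildInfo;

public class CalculateVersionSketch {
    public static void main(String[] args) {
        // "3.8.1" -> 3 * 10000 + 8 * 100 + 1 = 30801, under the assumed multipliers
        System.out.println(BuildInfo.calculateVersion("3.8.1"));
        // null (or any string VERSION_PATTERN rejects) falls back to UNKNOWN_HAZELCAST_VERSION
        System.out.println(BuildInfo.calculateVersion(null));
    }
}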
@Before
public void setup() {
    ILogger logger = Logger.getLogger(getClass());
    HazelcastThreadGroup threadGroup = new HazelcastThreadGroup("test", logger, getClass().getClassLoader());
    responsePacketHandler = mock(PacketHandler.class);
    asyncHandler = new AsyncResponseHandler(threadGroup, logger, responsePacketHandler);
    asyncHandler.start();
    serializationService = new DefaultSerializationServiceBuilder().build();
}
public static int getLockTimeoutInMillis(Properties props) {
    int timeout = -1;
    try {
        timeout = PropertiesHelper.getInt(LOCK_TIMEOUT, props, -1);
    } catch (Exception e) {
        Logger.getLogger(CacheEnvironment.class).finest(e);
    }
    if (timeout < 0) {
        timeout = MAXIMUM_LOCK_TIMEOUT;
    }
    return timeout;
}
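A hedged sketch of how the lock-timeout lookup above behaves. It assumes LOCK_TIMEOUT is reachable as a public constant on CacheEnvironment and that the class lives in com.hazelcast.hibernate (neither is shown in the snippet); if the constant is not public, substitute the corresponding Hibernate property key.

import java.util.Properties;

import com.hazelcast.hibernate.CacheEnvironment;

public class LockTimeoutSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        // An explicit, non-negative value is returned as-is.
        props.setProperty(CacheEnvironment.LOCK_TIMEOUT, "10000");
        System.out.println(CacheEnvironment.getLockTimeoutInMillis(props)); // 10000

        // A missing (or negative) value falls back to MAXIMUM_LOCK_TIMEOUT.
        System.out.println(CacheEnvironment.getLockTimeoutInMillis(new Properties()));
    }
}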
@Before
public void setup() {
    metricsRegistry = new MetricsRegistryImpl(Logger.getLogger(MetricsRegistryImpl.class), ProbeLevel.INFO);
    for (String name : metricsRegistry.getNames()) {
        ProbeInstance probeInstance = metricsRegistry.getProbeInstance(name);
        if (probeInstance != null && probeInstance.source != null) {
            metricsRegistry.deregister(probeInstance.source);
        }
    }
}
public ClientConnectionManagerImpl(HazelcastClient client, LoadBalancer loadBalancer,
                                   AddressTranslator addressTranslator) {
    this.client = client;
    this.addressTranslator = addressTranslator;

    final ClientConfig config = client.getClientConfig();
    final ClientNetworkConfig networkConfig = config.getNetworkConfig();

    final int connTimeout = networkConfig.getConnectionTimeout();
    connectionTimeout = connTimeout == 0 ? Integer.MAX_VALUE : connTimeout;

    final ClientProperties clientProperties = client.getClientProperties();
    int timeout = clientProperties.getHeartbeatTimeout().getInteger();
    this.heartBeatTimeout = timeout > 0 ? timeout : Integer.parseInt(PROP_HEARTBEAT_TIMEOUT_DEFAULT);

    int interval = clientProperties.getHeartbeatInterval().getInteger();
    heartBeatInterval = interval > 0 ? interval : Integer.parseInt(PROP_HEARTBEAT_INTERVAL_DEFAULT);

    smartRouting = networkConfig.isSmartRouting();
    executionService = (ClientExecutionServiceImpl) client.getClientExecutionService();
    credentials = initCredentials(config);
    router = new Router(loadBalancer);

    inSelector = new InSelectorImpl(client.getThreadGroup(), "InSelector",
            Logger.getLogger(InSelectorImpl.class), OUT_OF_MEMORY_HANDLER);
    outSelector = new OutSelectorImpl(client.getThreadGroup(), "OutSelector",
            Logger.getLogger(OutSelectorImpl.class), OUT_OF_MEMORY_HANDLER);

    socketInterceptor = initSocketInterceptor(networkConfig.getSocketInterceptorConfig());
    socketOptions = networkConfig.getSocketOptions();
    socketChannelWrapperFactory = initSocketChannel(networkConfig);
}
private void addNearCacheInvalidateListener() {
    try {
        ClientMessage request = ReplicatedMapAddNearCacheEntryListenerCodec.encodeRequest(getName(), false);
        EventHandler handler = new ReplicatedMapAddNearCacheEventHandler();
        String registrationId = getContext().getListenerService().startListening(request, null, handler);
        nearCache.setId(registrationId);
    } catch (Exception e) {
        Logger.getLogger(ClientHeapNearCache.class)
                .severe("-----------------\n Near Cache is not initialized!!! \n-----------------", e);
    }
}
public HazelcastClientCacheManager(HazelcastClientCachingProvider cachingProvider,
                                   HazelcastInstance hazelcastInstance, URI uri,
                                   ClassLoader classLoader, Properties properties) {
    super(cachingProvider, uri, classLoader, properties);
    if (hazelcastInstance == null) {
        throw new NullPointerException("hazelcastInstance missing");
    }
    this.hazelcastInstance = hazelcastInstance;
    final ClientCacheDistributedObject setupRef =
            hazelcastInstance.getDistributedObject(CacheService.SERVICE_NAME, "setupRef");
    this.clientContext = setupRef.getClientContext();
    logger = Logger.getLogger(getClass());
}
public class UrlXmlConfig extends Config {

    private final ILogger logger = Logger.getLogger(UrlXmlConfig.class.getName());

    public UrlXmlConfig() {
    }

    public UrlXmlConfig(String url) throws MalformedURLException, IOException {
        this(new URL(url));
    }

    public UrlXmlConfig(URL url) throws IOException {
        super();
        logger.log(Level.INFO, "Configuring Hazelcast from '" + url.toString() + "'.");
        InputStream in = url.openStream();
        new XmlConfigBuilder(in).build(this);
    }
}
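A brief usage sketch for UrlXmlConfig; the URL below is a placeholder, not something taken from the snippet.

import com.hazelcast.config.UrlXmlConfig;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;

public class UrlXmlConfigUsage {
    public static void main(String[] args) throws Exception {
        // Point this at a reachable hazelcast.xml.
        UrlXmlConfig config = new UrlXmlConfig("http://example.com/hazelcast.xml");
        HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
        hz.shutdown();
    }
}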
public static <T> Iterator<T> iterator(final Class<T> clazz, String factoryId, ClassLoader classLoader)
        throws Exception {
    final List<ClassLoader> classLoaders = selectClassLoaders(classLoader);

    final Set<URLDefinition> factoryUrls = new HashSet<URLDefinition>();
    for (ClassLoader selectedClassLoader : classLoaders) {
        factoryUrls.addAll(collectFactoryUrls(factoryId, selectedClassLoader));
    }

    final Set<ServiceDefinition> serviceDefinitions = new HashSet<ServiceDefinition>();
    for (URLDefinition urlDefinition : factoryUrls) {
        serviceDefinitions.addAll(parse(urlDefinition));
    }
    if (serviceDefinitions.isEmpty()) {
        Logger.getLogger(ServiceLoader.class).warning(
                "Service loader could not load 'META-INF/services/" + factoryId
                        + "'. It may be empty or may not exist.");
    }

    return new Iterator<T>() {
        final Iterator<ServiceDefinition> iterator = serviceDefinitions.iterator();

        public boolean hasNext() {
            return iterator.hasNext();
        }

        public T next() {
            final ServiceDefinition definition = iterator.next();
            try {
                String className = definition.className;
                ClassLoader classLoader = definition.classLoader;
                return clazz.cast(ClassLoaderUtil.newInstance(classLoader, className));
            } catch (Exception e) {
                throw new HazelcastException(e);
            }
        }

        public void remove() {
            throw new UnsupportedOperationException();
        }
    };
}
@Before
public void setup() throws Exception {
    loggingService = new LoggingServiceImpl("foo", "jdk", new BuildInfo("1", "1", "1", 1, false, (byte) 1));
    serializationService = new DefaultSerializationServiceBuilder().build();

    config = new Config();
    config.setProperty(GroupProperty.PARTITION_COUNT, "10");
    config.setProperty(GroupProperty.PARTITION_OPERATION_THREAD_COUNT, "10");
    config.setProperty(GroupProperty.GENERIC_OPERATION_THREAD_COUNT, "10");

    thisAddress = new Address("localhost", 5701);
    threadGroup = new HazelcastThreadGroup("foo",
            loggingService.getLogger(HazelcastThreadGroup.class),
            Thread.currentThread().getContextClassLoader());
    nodeExtension = new DefaultNodeExtension();
    handlerFactory = new DummyOperationRunnerFactory();

    metricsRegistry = new MetricsRegistryImpl(Logger.getLogger(MetricsRegistry.class), INFO);
    responsePacketHandler = new DummyResponsePacketHandler();
}
public class SmartClientConnectionManager implements ClientConnectionManager { private static final ILogger logger = Logger.getLogger(ClientConnectionManager.class); private final int poolSize; private final Authenticator authenticator; private final HazelcastClient client; private final Router router; private final ConcurrentMap<Address, ObjectPool<ConnectionWrapper>> poolMap = new ConcurrentHashMap<Address, ObjectPool<ConnectionWrapper>>(16, 0.75f, 1); private final SocketOptions socketOptions; private final SocketInterceptor socketInterceptor; private final HeartBeatChecker heartbeat; private volatile boolean live = true; public SmartClientConnectionManager( HazelcastClient client, Authenticator authenticator, LoadBalancer loadBalancer) { this.authenticator = authenticator; this.client = client; ClientConfig config = client.getClientConfig(); router = new Router(loadBalancer); // init socketInterceptor SocketInterceptorConfig sic = config.getSocketInterceptorConfig(); if (sic != null && sic.isEnabled()) { SocketInterceptor implementation = (SocketInterceptor) sic.getImplementation(); if (implementation == null && sic.getClassName() != null) { try { implementation = (SocketInterceptor) Class.forName(sic.getClassName()).newInstance(); } catch (Throwable e) { logger.severe("SocketInterceptor class cannot be instantiated!" + sic.getClassName(), e); } } if (implementation != null) { if (!(implementation instanceof MemberSocketInterceptor)) { logger.severe( "SocketInterceptor must be instance of " + MemberSocketInterceptor.class.getName()); implementation = null; } else { logger.info("SocketInterceptor is enabled"); } } if (implementation != null) { socketInterceptor = implementation; socketInterceptor.init(sic.getProperties()); } else { socketInterceptor = null; } } else { socketInterceptor = null; } poolSize = config.getConnectionPoolSize(); int connectionTimeout = config.getConnectionTimeout(); heartbeat = new HeartBeatChecker( connectionTimeout, client.getSerializationService(), client.getClientExecutionService()); socketOptions = config.getSocketOptions(); } public Connection firstConnection(Address address, Authenticator authenticator) throws IOException { return newConnection(address, authenticator); } public Connection newConnection(Address address, Authenticator authenticator) throws IOException { checkLive(); final ConnectionImpl connection = new ConnectionImpl(address, socketOptions, client.getSerializationService()); if (socketInterceptor != null) { socketInterceptor.onConnect(connection.getSocket()); } connection.init(); authenticator.auth(connection); return connection; } public Connection getRandomConnection() throws IOException { checkLive(); final Address address = router.next(); if (address == null) { throw new IOException("LoadBalancer '" + router + "' could not find a address to route to"); } return getConnection(address); } public Connection getConnection(Address address) throws IOException { checkLive(); if (address == null) { throw new IllegalArgumentException("Target address is required!"); } final ObjectPool<ConnectionWrapper> pool = getConnectionPool(address); if (pool == null) { return null; } Connection connection = null; try { connection = pool.take(); } catch (Exception e) { if (logger.isFinestEnabled()) { logger.warning("Error during connection creation... To -> " + address, e); } } // Could be that this address is dead and that's why pool is not able to create and give a // connection. 
// We will call it again, and hopefully at some time LoadBalancer will give us the right target // for the connection. if (connection != null && !heartbeat.checkHeartBeat(connection)) { logger.warning(connection + " failed to heartbeat, closing..."); connection.close(); connection = null; } return connection; } private void checkLive() { if (!live) { throw new HazelcastInstanceNotActiveException(); } } private final ConstructorFunction<Address, ObjectPool<ConnectionWrapper>> ctor = new ConstructorFunction<Address, ObjectPool<ConnectionWrapper>>() { public ObjectPool<ConnectionWrapper> createNew(final Address address) { return new QueueBasedObjectPool<ConnectionWrapper>( poolSize, new Factory<ConnectionWrapper>() { public ConnectionWrapper create() throws IOException { return new ConnectionWrapper(newConnection(address, authenticator)); } }, new Destructor<ConnectionWrapper>() { public void destroy(ConnectionWrapper connection) { connection.close(); } }); } }; private ObjectPool<ConnectionWrapper> getConnectionPool(final Address address) { checkLive(); ObjectPool<ConnectionWrapper> pool = poolMap.get(address); if (pool == null) { if (client.getClientClusterService().getMember(address) == null) { return null; } pool = ctor.createNew(address); ObjectPool<ConnectionWrapper> current = poolMap.putIfAbsent(address, pool); pool = current == null ? pool : current; } return pool; } private class ConnectionWrapper implements Connection { final Connection connection; private ConnectionWrapper(Connection connection) { this.connection = connection; } public Address getRemoteEndpoint() { return connection.getRemoteEndpoint(); } public boolean write(Data data) throws IOException { return connection.write(data); } public Data read() throws IOException { return connection.read(); } public void release() throws IOException { releaseConnection(this); } public void close() { logger.info("Closing connection -> " + connection); IOUtil.closeResource(connection); } public int getId() { return connection.getId(); } public long getLastReadTime() { return connection.getLastReadTime(); } public void setRemoteEndpoint(Address address) { connection.setRemoteEndpoint(address); } public String toString() { return connection.toString(); } public InetSocketAddress getLocalSocketAddress() { return connection.getLocalSocketAddress(); } } private void releaseConnection(ConnectionWrapper connection) { if (live) { final ObjectPool<ConnectionWrapper> pool = poolMap.get(connection.getRemoteEndpoint()); if (pool != null) { pool.release(connection); } else { connection.close(); } } else { connection.close(); } } public void removeConnectionPool(Address address) { ObjectPool<ConnectionWrapper> pool = poolMap.remove(address); if (pool != null) { pool.destroy(); } } public void shutdown() { live = false; for (ObjectPool<ConnectionWrapper> pool : poolMap.values()) { pool.destroy(); } poolMap.clear(); } }
public class MapTTLSaturationTest {

    private static final ILogger LOGGER = Logger.getLogger(MapTTLSaturationTest.class);

    // properties
    public String basename = MapTTLSaturationTest.class.getSimpleName();
    public int threadCount = 3;
    public double maxHeapUsagePercentage = 80;

    private TestContext testContext;
    private IMap<Long, Long> map;

    @Setup
    public void setup(TestContext testContext) throws Exception {
        this.testContext = testContext;
        map = testContext.getTargetInstance().getMap(basename);
    }

    private double heapUsedPercentage() {
        long total = Runtime.getRuntime().totalMemory();
        long max = Runtime.getRuntime().maxMemory();
        return (100d * total) / max;
    }

    @Run
    public void run() {
        ThreadSpawner spawner = new ThreadSpawner(testContext.getTestId());
        for (int i = 0; i < threadCount; i++) {
            spawner.spawn(new Worker());
        }
        spawner.awaitCompletion();
    }

    private class Worker implements Runnable {
        @Override
        public void run() {
            long free = Runtime.getRuntime().freeMemory();
            long total = Runtime.getRuntime().totalMemory();
            long baseLineUsed = total - free;
            long maxBytes = Runtime.getRuntime().maxMemory();
            double usedOfMax = 100.0 * ((double) baseLineUsed / (double) maxBytes);

            LOGGER.info(basename + " before Init");
            LOGGER.info(basename + " free = " + humanReadableByteCount(free, true) + " = " + free);
            LOGGER.info(basename + " used = " + humanReadableByteCount(baseLineUsed, true) + " = " + baseLineUsed);
            LOGGER.info(basename + " max = " + humanReadableByteCount(maxBytes, true) + " = " + maxBytes);
            LOGGER.info(basename + " usedOfMax = " + usedOfMax + '%');

            int counter = 1;
            Random random = new Random();

            while (!testContext.isStopped()) {
                double usedPercentage = heapUsedPercentage();
                if (usedPercentage >= maxHeapUsagePercentage) {
                    LOGGER.info("heap used: " + usedPercentage + " % map.size:" + map.size());
                    sleepMillis(10000);
                } else {
                    for (int i = 0; i < 1000; i++) {
                        counter++;
                        if (counter % 100000 == 0) {
                            LOGGER.info("at:" + counter + " heap used: " + usedPercentage
                                    + " % map.size:" + map.size());
                        }
                        long key = random.nextLong();
                        map.put(key, 0L, 24, TimeUnit.HOURS);
                    }
                }
            }

            free = Runtime.getRuntime().freeMemory();
            total = Runtime.getRuntime().totalMemory();
            long nowUsed = total - free;
            maxBytes = Runtime.getRuntime().maxMemory();
            usedOfMax = 100.0 * ((double) nowUsed / (double) maxBytes);

            LOGGER.info(basename + " After Init");
            LOGGER.info(basename + " map = " + map.size());
            LOGGER.info(basename + " free = " + humanReadableByteCount(free, true) + " = " + free);
            LOGGER.info(basename + " used = " + humanReadableByteCount(nowUsed, true) + " = " + nowUsed);
            LOGGER.info(basename + " max = " + humanReadableByteCount(maxBytes, true) + " = " + maxBytes);
            LOGGER.info(basename + " usedOfMax = " + usedOfMax + '%');
            LOGGER.info(basename + " map size:" + map.size());
        }
    }

    @Verify(global = false)
    public void globalVerify() throws Exception {
        long free = Runtime.getRuntime().freeMemory();
        long total = Runtime.getRuntime().totalMemory();
        long used = total - free;
        long maxBytes = Runtime.getRuntime().maxMemory();
        double usedOfMax = 100.0 * ((double) used / (double) maxBytes);

        LOGGER.info(basename + " map = " + map.size());
        LOGGER.info(basename + " free = " + humanReadableByteCount(free, true) + " = " + free);
        LOGGER.info(basename + " used = " + humanReadableByteCount(used, true) + " = " + used);
        LOGGER.info(basename + " max = " + humanReadableByteCount(maxBytes, true) + " = " + maxBytes);
        LOGGER.info(basename + " usedOfMax = " + usedOfMax + '%');
    }

    public static void main(String[] args) throws Exception {
        Config config = new Config();
        config.addMapConfig(new MapConfig("mapttlsaturation*").setBackupCount(0).setStatisticsEnabled(false));
        HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
        new TestRunner<MapTTLSaturationTest>(new MapTTLSaturationTest())
                .withHazelcastInstance(hz)
                .withDuration(6000)
                .run();
    }
}
public abstract class ClientListenerServiceImpl implements ClientListenerService, MembershipListener {

    protected final HazelcastClientInstanceImpl client;
    protected final ClientExecutionServiceImpl executionService;
    protected final SerializationService serializationService;
    protected final ClientInvocationService invocationService;

    private final ConcurrentMap<Integer, EventHandler> eventHandlerMap
            = new ConcurrentHashMap<Integer, EventHandler>();
    private final ILogger logger = Logger.getLogger(ClientInvocationService.class);
    private final StripedExecutor eventExecutor;

    public ClientListenerServiceImpl(HazelcastClientInstanceImpl client, int eventThreadCount,
                                     int eventQueueCapacity) {
        this.client = client;
        this.executionService = (ClientExecutionServiceImpl) client.getClientExecutionService();
        this.invocationService = client.getInvocationService();
        this.serializationService = client.getSerializationService();
        client.getClientClusterService().addMembershipListener(this);
        this.eventExecutor = new StripedExecutor(logger, client.getName() + ".event",
                client.getThreadGroup(), eventThreadCount, eventQueueCapacity);
    }

    public void addEventHandler(int callId, EventHandler handler) {
        eventHandlerMap.put(callId, handler);
    }

    protected void removeEventHandler(int callId) {
        eventHandlerMap.remove(callId);
    }

    public void handleClientMessage(ClientMessage clientMessage, Connection connection) {
        try {
            eventExecutor.execute(new ClientEventProcessor(clientMessage, (ClientConnection) connection));
        } catch (RejectedExecutionException e) {
            logger.log(Level.WARNING, " event clientMessage could not be handled ", e);
        }
    }

    public void shutdown() {
        eventExecutor.shutdown();
    }

    public StripedExecutor getEventExecutor() {
        return eventExecutor;
    }

    private final class ClientEventProcessor implements StripedRunnable {

        final ClientMessage clientMessage;
        final ClientConnection connection;

        private ClientEventProcessor(ClientMessage clientMessage, ClientConnection connection) {
            this.clientMessage = clientMessage;
            this.connection = connection;
        }

        @Override
        public void run() {
            try {
                int correlationId = clientMessage.getCorrelationId();
                final EventHandler eventHandler = eventHandlerMap.get(correlationId);
                if (eventHandler == null) {
                    logger.warning("No eventHandler for callId: " + correlationId + ", event: " + clientMessage);
                    return;
                }
                eventHandler.handle(clientMessage);
            } finally {
                connection.decrementPendingPacketCount();
            }
        }

        @Override
        public int getKey() {
            return clientMessage.getPartitionId();
        }
    }
}
class ClientMembershipListener implements EventHandler<ClientMessage> { public static final int INITIAL_MEMBERS_TIMEOUT_SECONDS = 5; private static final ILogger LOGGER = com.hazelcast.logging.Logger.getLogger(ClientMembershipListener.class); private final List<MemberImpl> members = new LinkedList<MemberImpl>(); private final HazelcastClientInstanceImpl client; private final ClientClusterServiceImpl clusterService; private final ClientPartitionServiceImpl partitionService; private final ClientConnectionManagerImpl connectionManager; private volatile CountDownLatch initialListFetchedLatch; public ClientMembershipListener(HazelcastClientInstanceImpl client) { this.client = client; connectionManager = (ClientConnectionManagerImpl) client.getConnectionManager(); partitionService = (ClientPartitionServiceImpl) client.getClientPartitionService(); clusterService = (ClientClusterServiceImpl) client.getClientClusterService(); } @Override public void handle(ClientMessage clientMessage) { if (clientMessage.getMessageType() == ClientMessageType.MEMBER_LIST_RESULT.id()) { final MemberListResultParameters memberListResultParameters = MemberListResultParameters.decode(clientMessage); initialMembers(memberListResultParameters.memberList); initialListFetchedLatch.countDown(); } else if (clientMessage.getMessageType() == ClientMessageType.MEMBER_RESULT.id()) { handleMember(clientMessage); } else if (clientMessage.getMessageType() == ClientMessageType.MEMBER_ATTRIBUTE_RESULT.id()) { final MemberAttributeChangeResultParameters parameters = MemberAttributeChangeResultParameters.decode(clientMessage); memberAttributeChanged(parameters.memberAttributeChange); } else { LOGGER.warning("Unknown message type :" + clientMessage.getMessageType()); } } private void handleMember(ClientMessage clientMessage) { final MemberResultParameters memberResultParameters = MemberResultParameters.decode(clientMessage); switch (memberResultParameters.eventType) { case MemberResultParameters.MEMBER_ADDED: memberAdded(memberResultParameters.member); break; case MemberResultParameters.MEMBER_REMOVED: memberRemoved(memberResultParameters.member); break; default: LOGGER.warning("Unknown event type :" + memberResultParameters.eventType); } partitionService.refreshPartitions(); } @Override public void beforeListenerRegister() {} @Override public void onListenerRegister() {} void listenMembershipEvents(Address ownerConnectionAddress) { initialListFetchedLatch = new CountDownLatch(1); try { ClientMessage clientMessage = RegisterMembershipListenerParameters.encode(); Connection connection = connectionManager.getConnection(ownerConnectionAddress); if (connection == null) { System.out.println("FATAL connection null " + ownerConnectionAddress); throw new IllegalStateException( "Can not load initial members list because owner connection is null. " + "Address " + ownerConnectionAddress); } ClientInvocation invocation = new ClientInvocation(client, this, clientMessage, connection); invocation.invoke().get(); waitInitialMemberListFetched(); } catch (Exception e) { if (client.getLifecycleService().isRunning()) { if (LOGGER.isFinestEnabled()) { LOGGER.warning( "Error while registering to cluster events! -> " + ownerConnectionAddress, e); } else { LOGGER.warning( "Error while registering to cluster events! 
-> " + ownerConnectionAddress + ", Error: " + e.toString()); } } } } private void waitInitialMemberListFetched() throws InterruptedException { boolean success = initialListFetchedLatch.await(INITIAL_MEMBERS_TIMEOUT_SECONDS, TimeUnit.SECONDS); if (!success) { LOGGER.warning("Error while getting initial member list from cluster!"); } } void initialMembers(Collection<MemberImpl> memberList) { Map<String, MemberImpl> prevMembers = Collections.emptyMap(); if (!members.isEmpty()) { prevMembers = new HashMap<String, MemberImpl>(members.size()); for (MemberImpl member : members) { prevMembers.put(member.getUuid(), member); } members.clear(); } members.addAll(memberList); final List<MembershipEvent> events = detectMembershipEvents(prevMembers); if (events.size() != 0) { applyMemberListChanges(); } fireMembershipEvent(events); } private void memberRemoved(MemberImpl member) { members.remove(member); final Connection connection = connectionManager.getConnection(member.getAddress()); if (connection != null) { connectionManager.destroyConnection(connection); } applyMemberListChanges(); MembershipEvent event = new MembershipEvent( client.getCluster(), member, ClientInitialMembershipEvent.MEMBER_REMOVED, Collections.unmodifiableSet(new LinkedHashSet<Member>(members))); clusterService.fireMembershipEvent(event); } private void memberAttributeChanged(MemberAttributeChange memberAttributeChange) { Map<Address, MemberImpl> memberMap = clusterService.getMembersRef(); if (memberMap == null) { return; } if (memberAttributeChange == null) { return; } for (MemberImpl target : memberMap.values()) { if (target.getUuid().equals(memberAttributeChange.getUuid())) { final MemberAttributeOperationType operationType = memberAttributeChange.getOperationType(); final String key = memberAttributeChange.getKey(); final Object value = memberAttributeChange.getValue(); target.updateAttribute(operationType, key, value); MemberAttributeEvent memberAttributeEvent = new MemberAttributeEvent(client.getCluster(), target, operationType, key, value); clusterService.fireMemberAttributeEvent(memberAttributeEvent); break; } } } private void applyMemberListChanges() { updateMembersRef(); LOGGER.info(clusterService.membersString()); } private void fireMembershipEvent(List<MembershipEvent> events) { for (MembershipEvent event : events) { clusterService.fireMembershipEvent(event); } } private List<MembershipEvent> detectMembershipEvents(Map<String, MemberImpl> prevMembers) { final List<MembershipEvent> events = new LinkedList<MembershipEvent>(); final Set<Member> eventMembers = Collections.unmodifiableSet(new LinkedHashSet<Member>(members)); for (MemberImpl member : members) { final MemberImpl former = prevMembers.remove(member.getUuid()); if (former == null) { events.add( new MembershipEvent( client.getCluster(), member, MembershipEvent.MEMBER_ADDED, eventMembers)); } } for (MemberImpl member : prevMembers.values()) { events.add( new MembershipEvent( client.getCluster(), member, MembershipEvent.MEMBER_REMOVED, eventMembers)); if (clusterService.getMember(member.getAddress()) == null) { final Connection connection = connectionManager.getConnection(member.getAddress()); if (connection != null) { connectionManager.destroyConnection(connection); } } } return events; } private void memberAdded(MemberImpl member) { members.add(member); applyMemberListChanges(); MembershipEvent event = new MembershipEvent( client.getCluster(), member, ClientInitialMembershipEvent.MEMBER_ADDED, Collections.unmodifiableSet(new LinkedHashSet<Member>(members))); 
clusterService.fireMembershipEvent(event); } private void updateMembersRef() { final Map<Address, MemberImpl> map = new LinkedHashMap<Address, MemberImpl>(members.size()); for (MemberImpl member : members) { map.put(member.getAddress(), member); } clusterService.setMembersRef(Collections.unmodifiableMap(map)); } }
public class Worker { static final ILogger log = Logger.getLogger(Worker.class); private HazelcastInstance serverInstance; private HazelcastInstance clientInstance; private String hzFile; private String clientHzFile; private String workerMode; private String workerId; private volatile Test test; private final BlockingQueue<TestCommandRequest> requestQueue = new LinkedBlockingQueue<TestCommandRequest>(); private final BlockingQueue<TestCommandResponse> responseQueue = new LinkedBlockingQueue<TestCommandResponse>(); public void start() throws Exception { if ("server".equals(workerMode)) { this.serverInstance = createServerHazelcastInstance(); } else if ("client".equals(workerMode)) { this.clientInstance = createClientHazelcastInstance(); } else if ("mixed".equals(workerMode)) { this.serverInstance = createServerHazelcastInstance(); this.clientInstance = createClientHazelcastInstance(); } else { throw new IllegalStateException("Unknown worker mode:" + workerMode); } new TestCommandRequestProcessingThread().start(); new SocketThread().start(); signalStartToAgent(); } private void signalStartToAgent() { String address; if (serverInstance == null) { address = "client:" + getHostAddress(); } else { InetSocketAddress socketAddress = serverInstance.getCluster().getLocalMember().getSocketAddress(); address = socketAddress.getAddress().getHostAddress() + ":" + socketAddress.getPort(); } File file = new File("worker.address"); writeObject(address, file); } private HazelcastInstance createClientHazelcastInstance() throws Exception { log.info("Creating Client HazelcastInstance"); XmlClientConfigBuilder configBuilder = new XmlClientConfigBuilder(clientHzFile); ClientConfig clientConfig = configBuilder.build(); HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig); log.info("Successfully created Client HazelcastInstance"); return client; } private HazelcastInstance createServerHazelcastInstance() throws Exception { log.info("Creating Server HazelcastInstance"); XmlConfigBuilder configBuilder = new XmlConfigBuilder(hzFile); Config config = configBuilder.build(); HazelcastInstance server = Hazelcast.newHazelcastInstance(config); log.info("Successfully created Server HazelcastInstance"); return server; } private static void logInterestingSystemProperties() { logSystemProperty("java.class.path"); logSystemProperty("java.home"); logSystemProperty("java.vendor"); logSystemProperty("java.vendor.url"); logSystemProperty("sun.java.command"); logSystemProperty("java.version"); logSystemProperty("os.arch"); logSystemProperty("os.name"); logSystemProperty("os.version"); logSystemProperty("user.dir"); logSystemProperty("user.home"); logSystemProperty("user.name"); logSystemProperty("STABILIZER_HOME"); logSystemProperty("hazelcast.logging.type"); logSystemProperty("log4j.configuration"); } private static void logSystemProperty(String name) { log.info(format("%s=%s", name, System.getProperty(name))); } public static void main(String[] args) { log.info("Starting Stabilizer Worker"); try { logInputArguments(); logInterestingSystemProperties(); String workerId = System.getProperty("workerId"); log.info("Worker id:" + workerId); String workerHzFile = args[0]; log.info("Worker hz config file:" + workerHzFile); log.info(fileAsText(new File(workerHzFile))); String clientHzFile = args[1]; log.info("Client hz config file:" + clientHzFile); log.info(fileAsText(new File(clientHzFile))); String workerMode = System.getProperty("workerMode"); log.info("Worker mode:" + workerMode); Worker worker = new Worker(); 
worker.workerId = workerId; worker.hzFile = workerHzFile; worker.clientHzFile = clientHzFile; worker.workerMode = workerMode; worker.start(); log.info("Successfully started Hazelcast Stabilizer Worker:" + workerId); } catch (Throwable e) { ExceptionReporter.report(e); System.exit(1); } } private static void logInputArguments() { List<String> inputArguments = ManagementFactory.getRuntimeMXBean().getInputArguments(); log.info("jvm input arguments = " + inputArguments); } private class SocketThread extends Thread { @Override public void run() { for (; ; ) { try { List<TestCommandRequest> requests = execute(WorkerJvmManager.SERVICE_POLL_WORK, workerId); for (TestCommandRequest request : requests) { requestQueue.add(request); } TestCommandResponse response = responseQueue.poll(1, TimeUnit.SECONDS); if (response == null) { continue; } sendResponse(asList(response)); List<TestCommandResponse> responses = new LinkedList<TestCommandResponse>(); responseQueue.drainTo(responses); sendResponse(responses); } catch (Throwable e) { ExceptionReporter.report(e); } } } private void sendResponse(List<TestCommandResponse> responses) throws Exception { for (TestCommandResponse response : responses) { execute(WorkerJvmManager.COMMAND_PUSH_RESPONSE, workerId, response); } } // we create a new socket for every request because don't want to depend on the state of a // socket // since we are going to do nasty stuff. private <E> E execute(String service, Object... args) throws Exception { Socket socket = new Socket(InetAddress.getByName(null), WorkerJvmManager.PORT); try { ObjectOutputStream oos = new ObjectOutputStream(socket.getOutputStream()); oos.writeObject(service); for (Object arg : args) { oos.writeObject(arg); } oos.flush(); ObjectInputStream in = new ObjectInputStream(socket.getInputStream()); Object response = in.readObject(); if (response instanceof TerminateWorkerException) { System.exit(0); } if (response instanceof Exception) { Exception exception = (Exception) response; Utils.fixRemoteStackTrace(exception, Thread.currentThread().getStackTrace()); throw exception; } return (E) response; } finally { Utils.closeQuietly(socket); } } } private class TestCommandRequestProcessingThread extends Thread { @Override public void run() { for (; ; ) { try { TestCommandRequest request = requestQueue.take(); if (request == null) { throw new NullPointerException("request can't be null"); } doProcess(request.id, request.task); } catch (Throwable e) { ExceptionReporter.report(e); } } } private void doProcess(long id, TestCommand command) { Object result = null; try { if (command instanceof InitTestCommand) { process((InitTestCommand) command); } else if (command instanceof StartTestCommand) { process((StartTestCommand) command); } else if (command instanceof StopTestCommand) { process((StopTestCommand) command); } else if (command instanceof GenericTestCommand) { result = process((GenericTestCommand) command); } else if (command instanceof GetOperationCountTestCommand) { result = process((GetOperationCountTestCommand) command); } else { throw new RuntimeException("Unhandled task:" + command.getClass()); } } catch (Throwable e) { result = e; } TestCommandResponse response = new TestCommandResponse(); response.commandId = id; response.result = result; responseQueue.add(response); } private Long process(GetOperationCountTestCommand command) { return test.getOperationCount(); } private void process(StartTestCommand testCommand) throws Exception { try { log.info("Starting test"); if (test == null) { throw new 
IllegalStateException("No running test found"); } boolean passive = false; if (testCommand.clientOnly && clientInstance == null) { passive = true; } test.start(passive); } catch (Exception e) { log.severe("Failed to start test", e); throw e; } } public Object process(GenericTestCommand genericTestTask) throws Throwable { String methodName = genericTestTask.methodName; try { log.info("Calling test." + methodName + "()"); if (test == null) { throw new IllegalStateException("No running test to execute test." + methodName + "()"); } Method method = test.getClass().getMethod(methodName); Object o = method.invoke(test); log.info("Finished calling test." + methodName + "()"); return o; } catch (InvocationTargetException e) { log.severe(format("Failed to execute test.%s()", methodName), e); throw e.getTargetException(); } catch (Exception e) { log.severe(format("Failed to execute test.%s()", methodName), e); throw e; } } private void process(InitTestCommand initTestCommand) throws Exception { try { TestCase testCase = initTestCommand.testCase; log.info("Init Test:\n" + testCase); String clazzName = testCase.getClassname(); TestDependencies dependencies = new TestDependencies(); dependencies.clientInstance = clientInstance; dependencies.serverInstance = serverInstance; dependencies.testId = testCase.getId(); test = (Test) InitTestCommand.class.getClassLoader().loadClass(clazzName).newInstance(); test.init(dependencies); bindProperties(test, testCase); if (serverInstance != null) { serverInstance.getUserContext().put(Test.TEST_INSTANCE, test); } } catch (Exception e) { log.severe("Failed to init Test", e); throw e; } } public void process(StopTestCommand stopTask) throws Exception { try { log.info("Calling test.stop"); if (test == null) { throw new IllegalStateException("No test to stop"); } test.stop(stopTask.timeoutMs); log.info("Finished calling test.stop()"); } catch (Exception e) { log.severe("Failed to execute test.stop", e); throw e; } } } }
/**
 * Server side XaResource implementation.
 */
public final class XAResourceImpl extends AbstractDistributedObject<XAService> implements HazelcastXAResource {

    private static final int DEFAULT_TIMEOUT_SECONDS =
            (int) MILLISECONDS.toSeconds(TransactionOptions.DEFAULT_TIMEOUT_MILLIS);
    private static final ILogger LOGGER = Logger.getLogger(XAResourceImpl.class);

    private final ConcurrentMap<Long, TransactionContext> threadContextMap =
            new ConcurrentHashMap<Long, TransactionContext>();
    private final ConcurrentMap<Xid, List<TransactionContext>> xidContextMap =
            new ConcurrentHashMap<Xid, List<TransactionContext>>();
    private final String groupName;
    private final AtomicInteger timeoutInSeconds = new AtomicInteger(DEFAULT_TIMEOUT_SECONDS);

    public XAResourceImpl(NodeEngine nodeEngine, XAService service) {
        super(nodeEngine, service);
        GroupConfig groupConfig = nodeEngine.getConfig().getGroupConfig();
        groupName = groupConfig.getName();
    }

    @Override
    public void start(Xid xid, int flags) throws XAException {
        long threadId = currentThreadId();
        TransactionContext threadContext = threadContextMap.get(currentThreadId());
        switch (flags) {
            case TMNOFLAGS:
                List<TransactionContext> contexts = new CopyOnWriteArrayList<TransactionContext>();
                List<TransactionContext> currentContexts = xidContextMap.putIfAbsent(xid, contexts);
                if (currentContexts != null) {
                    throw new XAException("There is already TransactionContexts for the given xid: " + xid);
                }
                TransactionContext context = createTransactionContext(xid);
                contexts.add(context);
                threadContextMap.put(threadId, context);
                break;
            case TMRESUME:
            case TMJOIN:
                List<TransactionContext> contextList = xidContextMap.get(xid);
                if (contextList == null) {
                    throw new XAException("There is no TransactionContexts for the given xid: " + xid);
                }
                if (threadContext == null) {
                    threadContext = createTransactionContext(xid);
                    threadContextMap.put(threadId, threadContext);
                    contextList.add(threadContext);
                }
                break;
            default:
                throw new XAException("Unknown flag!!! " + flags);
        }
    }

    private TransactionContext createTransactionContext(Xid xid) {
        XAService xaService = getService();
        TransactionContext context = xaService.newXATransactionContext(xid, timeoutInSeconds.get());
        getTransaction(context).begin();
        return context;
    }

    @Override
    public void end(Xid xid, int flags) throws XAException {
        long threadId = currentThreadId();
        TransactionContext threadContext = threadContextMap.remove(threadId);
        if (threadContext == null && LOGGER.isFinestEnabled()) {
            LOGGER.finest("There is no TransactionContext for the current thread: " + threadId);
        }
        List<TransactionContext> contexts = xidContextMap.get(xid);
        if (contexts == null && LOGGER.isFinestEnabled()) {
            LOGGER.finest("There is no TransactionContexts for the given xid: " + xid);
        }
    }

    @Override
    public int prepare(Xid xid) throws XAException {
        List<TransactionContext> contexts = xidContextMap.get(xid);
        if (contexts == null) {
            throw new XAException("There is no TransactionContexts for the given xid: " + xid);
        }
        for (TransactionContext context : contexts) {
            Transaction transaction = getTransaction(context);
            transaction.prepare();
        }
        return XA_OK;
    }

    @Override
    public void commit(Xid xid, boolean onePhase) throws XAException {
        List<TransactionContext> contexts = xidContextMap.remove(xid);
        if (contexts == null && onePhase) {
            throw new XAException("There is no TransactionContexts for the given xid: " + xid);
        }
        if (contexts == null) {
            finalizeTransactionRemotely(xid, true);
            return;
        }
        for (TransactionContext context : contexts) {
            Transaction transaction = getTransaction(context);
            if (onePhase) {
                transaction.prepare();
            }
            transaction.commit();
        }
        clearRemoteTransactions(xid);
    }

    @Override
    public void rollback(Xid xid) throws XAException {
        List<TransactionContext> contexts = xidContextMap.remove(xid);
        if (contexts == null) {
            finalizeTransactionRemotely(xid, false);
            return;
        }
        for (TransactionContext context : contexts) {
            getTransaction(context).rollback();
        }
        clearRemoteTransactions(xid);
    }

    private void finalizeTransactionRemotely(Xid xid, boolean isCommit) throws XAException {
        NodeEngine nodeEngine = getNodeEngine();
        InternalPartitionService partitionService = nodeEngine.getPartitionService();
        OperationService operationService = nodeEngine.getOperationService();
        SerializableXID serializableXID =
                new SerializableXID(xid.getFormatId(), xid.getGlobalTransactionId(), xid.getBranchQualifier());
        Data xidData = nodeEngine.toData(serializableXID);
        int partitionId = partitionService.getPartitionId(xidData);
        FinalizeRemoteTransactionOperation operation = new FinalizeRemoteTransactionOperation(xidData, isCommit);
        InternalCompletableFuture<Integer> future =
                operationService.invokeOnPartition(SERVICE_NAME, operation, partitionId);
        Integer errorCode;
        try {
            errorCode = future.get();
        } catch (Exception e) {
            throw ExceptionUtil.rethrow(e);
        }
        if (errorCode != null) {
            throw new XAException(errorCode);
        }
    }

    private void clearRemoteTransactions(Xid xid) {
        NodeEngine nodeEngine = getNodeEngine();
        InternalPartitionService partitionService = nodeEngine.getPartitionService();
        OperationService operationService = nodeEngine.getOperationService();
        SerializableXID serializableXID =
                new SerializableXID(xid.getFormatId(), xid.getGlobalTransactionId(), xid.getBranchQualifier());
        Data xidData = nodeEngine.toData(serializableXID);
        int partitionId = partitionService.getPartitionId(xidData);
        ClearRemoteTransactionOperation operation = new ClearRemoteTransactionOperation(xidData);
        operationService.invokeOnPartition(SERVICE_NAME, operation, partitionId);
    }

    @Override
    public void forget(Xid xid) throws XAException {
        List<TransactionContext> contexts = xidContextMap.remove(xid);
        if (contexts == null) {
            throw new XAException("No context with the given xid: " + xid);
        }
        clearRemoteTransactions(xid);
    }

    @Override
    public boolean isSameRM(XAResource xaResource) throws XAException {
        if (this == xaResource) {
            return true;
        }
        if (xaResource instanceof XAResourceImpl) {
            XAResourceImpl otherXaResource = (XAResourceImpl) xaResource;
            return groupName.equals(otherXaResource.groupName);
        }
        return xaResource.isSameRM(this);
    }

    @Override
    public Xid[] recover(int flag) throws XAException {
        NodeEngine nodeEngine = getNodeEngine();
        XAService xaService = getService();
        OperationService operationService = nodeEngine.getOperationService();
        ClusterService clusterService = nodeEngine.getClusterService();
        Collection<Member> memberList = clusterService.getMembers();

        List<InternalCompletableFuture<SerializableList>> futureList =
                new ArrayList<InternalCompletableFuture<SerializableList>>();
        for (Member member : memberList) {
            if (member.localMember()) {
                continue;
            }
            CollectRemoteTransactionsOperation op = new CollectRemoteTransactionsOperation();
            Address address = member.getAddress();
            InternalCompletableFuture<SerializableList> future =
                    operationService.invokeOnTarget(SERVICE_NAME, op, address);
            futureList.add(future);
        }

        HashSet<SerializableXID> xids = new HashSet<SerializableXID>();
        xids.addAll(xaService.getPreparedXids());
        for (InternalCompletableFuture<SerializableList> future : futureList) {
            SerializableList xidSet = future.getSafely();
            for (Data xidData : xidSet) {
                SerializableXID xid = nodeEngine.toObject(xidData);
                xids.add(xid);
            }
        }
        return xids.toArray(new SerializableXID[xids.size()]);
    }

    @Override
    public int getTransactionTimeout() throws XAException {
        return timeoutInSeconds.get();
    }

    @Override
    public boolean setTransactionTimeout(int seconds) throws XAException {
        timeoutInSeconds.set(seconds == 0 ? DEFAULT_TIMEOUT_SECONDS : seconds);
        return true;
    }

    @Override
    public String getServiceName() {
        return SERVICE_NAME;
    }

    @Override
    public String getName() {
        return SERVICE_NAME;
    }

    @Override
    public TransactionContext getTransactionContext() {
        long threadId = Thread.currentThread().getId();
        TransactionContext transactionContext = threadContextMap.get(threadId);
        if (transactionContext == null) {
            throw new IllegalStateException("No TransactionContext associated with current thread :" + threadId);
        }
        return transactionContext;
    }

    public String getGroupName() {
        return groupName;
    }

    private Transaction getTransaction(TransactionContext context) {
        return ((XATransactionContextImpl) context).getTransaction();
    }

    private long currentThreadId() {
        return Thread.currentThread().getId();
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append("HazelcastXaResource {").append(groupName).append('}');
        return sb.toString();
    }
}
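A hedged sketch of driving the resource above through a one-phase XA transaction from user code. It assumes the resource is obtained via HazelcastInstance.getXAResource() and reuses the SerializableXID constructor the class itself uses; a real transaction manager would normally supply the Xid and drive these calls.

import javax.transaction.xa.XAResource;
import javax.transaction.xa.Xid;

import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.TransactionalMap;
import com.hazelcast.transaction.HazelcastXAResource;
import com.hazelcast.transaction.TransactionContext;
import com.hazelcast.transaction.impl.xa.SerializableXID;

public class XaOnePhaseSketch {
    public static void main(String[] args) throws Exception {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        HazelcastXAResource xaResource = hz.getXAResource();

        // Illustrative Xid; the format id and qualifiers are arbitrary here.
        Xid xid = new SerializableXID(1, "gtrid".getBytes(), "bqual".getBytes());

        xaResource.start(xid, XAResource.TMNOFLAGS);
        TransactionContext context = xaResource.getTransactionContext();
        TransactionalMap<String, String> map = context.getMap("xa-map");
        map.put("key", "value");
        xaResource.end(xid, XAResource.TMSUCCESS);

        // One-phase commit: the implementation above prepares and commits in a single call.
        xaResource.commit(xid, true);

        hz.shutdown();
    }
}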
/** * In this test we add listeners to a cache and record the number of events the listeners receive. * We compare those to the number of events we have generated using different cache operations. We * verify that no unexpected events have been received. */ public class ListenerICacheTest { private static final int PAUSE_FOR_LAST_EVENTS_SECONDS = 10; private static final ILogger LOGGER = Logger.getLogger(ListenerICacheTest.class); private enum Operation { PUT, PUT_EXPIRY, PUT_EXPIRY_ASYNC, GET_EXPIRY, GET_EXPIRY_ASYNC, REMOVE, REPLACE } public String basename = ListenerICacheTest.class.getSimpleName(); public int keyCount = 1000; public int maxExpiryDurationMs = 500; public boolean syncEvents = true; public double put = 0.8; public double putExpiry = 0.0; public double putAsyncExpiry = 0.0; public double getExpiry = 0.0; public double getAsyncExpiry = 0.0; public double remove = 0.1; public double replace = 0.1; private final OperationSelectorBuilder<Operation> builder = new OperationSelectorBuilder<Operation>(); private IList<Counter> results; private IList<ICacheEntryListener> listeners; private ICache<Integer, Long> cache; private ICacheEntryListener<Integer, Long> listener; private ICacheEntryEventFilter<Integer, Long> filter; @Setup public void setup(TestContext testContext) { HazelcastInstance hazelcastInstance = testContext.getTargetInstance(); results = hazelcastInstance.getList(basename); listeners = hazelcastInstance.getList(basename + "listeners"); cache = CacheUtils.getCache(hazelcastInstance, basename); listener = new ICacheEntryListener<Integer, Long>(); filter = new ICacheEntryEventFilter<Integer, Long>(); CacheEntryListenerConfiguration<Integer, Long> config = new MutableCacheEntryListenerConfiguration<Integer, Long>( FactoryBuilder.factoryOf(listener), FactoryBuilder.factoryOf(filter), false, syncEvents); cache.registerCacheEntryListener(config); builder .addOperation(Operation.PUT, put) .addOperation(Operation.PUT_EXPIRY, putExpiry) .addOperation(Operation.PUT_EXPIRY_ASYNC, putAsyncExpiry) .addOperation(Operation.GET_EXPIRY, getExpiry) .addOperation(Operation.GET_EXPIRY_ASYNC, getAsyncExpiry) .addOperation(Operation.REMOVE, remove) .addOperation(Operation.REPLACE, replace); } @Verify(global = false) public void localVerify() { LOGGER.info(basename + " Listener " + listener); LOGGER.info(basename + " Filter " + filter); } @Verify public void globalVerify() { Counter totalCounter = new Counter(); for (Counter counter : results) { totalCounter.add(counter); } LOGGER.info(basename + " " + totalCounter + " from " + results.size() + " Worker threads"); ICacheEntryListener totalEvents = new ICacheEntryListener(); for (ICacheEntryListener entryListener : listeners) { totalEvents.add(entryListener); } LOGGER.info(basename + " totalEvents: " + totalEvents); assertEquals(basename + " unexpected events found", 0, totalEvents.getUnexpected()); } @RunWithWorker public Worker run() { return new Worker(); } private final class Worker extends AbstractWorker<Operation> { private final Counter counter = new Counter(); private Worker() { super(builder); } @Override protected void timeStep(Operation operation) throws Exception { int expiryDuration = randomInt(maxExpiryDurationMs); ExpiryPolicy expiryPolicy = new CreatedExpiryPolicy(new Duration(TimeUnit.MILLISECONDS, expiryDuration)); int key = randomInt(keyCount); switch (operation) { case PUT: cache.put(key, getRandom().nextLong()); counter.put++; break; case PUT_EXPIRY: cache.put(key, getRandom().nextLong(), expiryPolicy); 
counter.putExpiry++; break; case PUT_EXPIRY_ASYNC: cache.putAsync(key, getRandom().nextLong(), expiryPolicy); counter.putAsyncExpiry++; break; case GET_EXPIRY: cache.get(key, expiryPolicy); counter.getExpiry++; break; case GET_EXPIRY_ASYNC: Future<Long> future = cache.getAsync(key, expiryPolicy); future.get(); counter.getAsyncExpiry++; break; case REMOVE: if (cache.remove(key)) { counter.remove++; } break; case REPLACE: if (cache.replace(key, getRandom().nextLong())) { counter.replace++; } break; default: throw new UnsupportedOperationException(); } } @Override protected void afterRun() { results.add(counter); } @Override public void afterCompletion() { listeners.add(listener); sleepSeconds(PAUSE_FOR_LAST_EVENTS_SECONDS); } } private static class Counter implements Serializable { public long put; public long putExpiry; public long putAsyncExpiry; public long getExpiry; public long getAsyncExpiry; public long remove; public long replace; public void add(Counter counter) { put += counter.put; putExpiry += counter.putExpiry; putAsyncExpiry += counter.putAsyncExpiry; getExpiry += counter.getExpiry; getAsyncExpiry += counter.getAsyncExpiry; remove += counter.remove; replace += counter.replace; } public String toString() { return "Counter{" + "put=" + put + ", putExpiry=" + putExpiry + ", putAsyncExpiry=" + putAsyncExpiry + ", getExpiry=" + getExpiry + ", getAsyncExpiry=" + getAsyncExpiry + ", remove=" + remove + ", replace=" + replace + '}'; } } }
public class CloudyUtility {

    static final ILogger logger = Logger.getLogger(CloudyUtility.class.getName());

    public static String getQueryString(Map<String, String> attributes) {
        StringBuilder query = new StringBuilder();
        for (Iterator<String> iterator = attributes.keySet().iterator(); iterator.hasNext(); ) {
            String key = iterator.next();
            String value = attributes.get(key);
            query.append(AwsURLEncoder.urlEncode(key))
                    .append("=")
                    .append(AwsURLEncoder.urlEncode(value))
                    .append("&");
        }
        String result = query.toString();
        if (result != null && !result.equals("")) {
            result = "?" + result.substring(0, result.length() - 1);
        }
        return result;
    }

    public static Object unmarshalTheResponse(InputStream stream, AwsConfig awsConfig) throws IOException {
        Object o = parse(stream, awsConfig);
        return o;
    }

    private static Object parse(InputStream in, AwsConfig awsConfig) {
        final DocumentBuilder builder;
        try {
            builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
            Document doc = builder.parse(in);
            final ByteArrayOutputStream baos = new ByteArrayOutputStream();
            Util.streamXML(doc, baos);
            final byte[] bytes = baos.toByteArray();
            // final ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
            // Reader reader = new BufferedReader(new InputStreamReader(bais));
            // int n;
            // char[] buffer = new char[1024];
            // Writer writer = new StringWriter();
            // while ((n = reader.read(buffer)) != -1) {
            //     writer.write(buffer, 0, n);
            // }
            // System.out.println(writer.toString());
            Element element = doc.getDocumentElement();
            NodeHolder elementNodeHolder = new NodeHolder(element);
            List<String> names = new ArrayList<String>();
            List<NodeHolder> reservationset = elementNodeHolder.getSubNodes("reservationset");
            for (NodeHolder reservation : reservationset) {
                List<NodeHolder> items = reservation.getSubNodes("item");
                for (NodeHolder item : items) {
                    NodeHolder instancesset = item.getSub("instancesset");
                    names.addAll(instancesset.getList("privateipaddress", awsConfig));
                }
            }
            return names;
        } catch (Exception e) {
            logger.log(Level.WARNING, e.getMessage(), e);
        }
        return new ArrayList<String>();
    }

    static class NodeHolder {

        Node node;

        public NodeHolder(Node node) {
            this.node = node;
        }

        public NodeHolder getSub(String name) {
            if (node != null) {
                for (org.w3c.dom.Node node
                        : new AbstractXmlConfigHelper.IterableNodeList(this.node.getChildNodes())) {
                    String nodeName = cleanNodeName(node.getNodeName());
                    if (name.equals(nodeName)) {
                        return new NodeHolder(node);
                    }
                }
            }
            return new NodeHolder(null);
        }

        public List<NodeHolder> getSubNodes(String name) {
            List<NodeHolder> list = new ArrayList<NodeHolder>();
            if (node != null) {
                for (org.w3c.dom.Node node
                        : new AbstractXmlConfigHelper.IterableNodeList(this.node.getChildNodes())) {
                    String nodeName = cleanNodeName(node.getNodeName());
                    if (name.equals(nodeName)) {
                        list.add(new NodeHolder(node));
                    }
                }
            }
            return list;
        }

        public List<String> getList(String name, AwsConfig awsConfig) {
            List<String> list = new ArrayList<String>();
            if (node != null) {
                for (org.w3c.dom.Node node
                        : new AbstractXmlConfigHelper.IterableNodeList(this.node.getChildNodes())) {
                    String nodeName = cleanNodeName(node.getNodeName());
                    if ("item".equals(nodeName)) {
                        if (new NodeHolder(node).getSub("instancestate").getSub("name")
                                .getNode().getFirstChild().getNodeValue().equals("running")) {
                            String ip = new NodeHolder(node).getSub(name).getNode().getFirstChild().getNodeValue();
                            boolean passed = applyFilter(awsConfig, node);
                            if (ip != null && passed) {
                                list.add(ip);
                            }
                        }
                    }
                }
            }
            return list;
        }

        private boolean applyFilter(AwsConfig awsConfig, Node node) {
            boolean inGroup = applyFilter(node, awsConfig.getSecurityGroupName(), "groupset", "groupname");
            return inGroup && applyTagFilter(node, awsConfig.getTagKey(), awsConfig.getTagValue());
        }

        private boolean applyFilter(Node node, String filter, String set, String filterField) {
            boolean passed = (nullOrEmpty(filter));
            if (!passed) {
                for (NodeHolder group : new NodeHolder(node).getSub(set).getSubNodes("item")) {
                    NodeHolder nh = group.getSub(filterField);
                    if (nh != null && nh.getNode().getFirstChild() != null
                            && filter.equals(nh.getNode().getFirstChild().getNodeValue())) {
                        passed = true;
                    }
                }
            }
            return passed;
        }

        private boolean applyTagFilter(Node node, String keyExpected, String valueExpected) {
            if (nullOrEmpty(keyExpected)) {
                return true;
            } else {
                for (NodeHolder group : new NodeHolder(node).getSub("tagset").getSubNodes("item")) {
                    if (keyEquals(keyExpected, group)
                            && (nullOrEmpty(valueExpected) || valueEquals(valueExpected, group))) {
                        return true;
                    }
                }
                return false;
            }
        }

        private boolean valueEquals(String valueExpected, NodeHolder group) {
            NodeHolder nhValue = group.getSub("value");
            return nhValue != null && nhValue.getNode().getFirstChild() != null
                    && valueExpected.equals(nhValue.getNode().getFirstChild().getNodeValue());
        }

        private boolean nullOrEmpty(String keyExpected) {
            return keyExpected == null || keyExpected.equals("");
        }

        private boolean keyEquals(String keyExpected, NodeHolder group) {
            NodeHolder nhKey = group.getSub("key");
            return nhKey != null && nhKey.getNode().getFirstChild() != null
                    && keyExpected.equals(nhKey.getNode().getFirstChild().getNodeValue());
        }

        public Node getNode() {
            return node;
        }
    }
}
/** * In this test we are using different predicate methods to execute a query on a map of {@link * Employee} objects. * * <p>This test also concurrently updates and modifies the employee objects in the map while the * predicate queries are executing. The test may also destroy the map while while predicate are * executing. We verify the result of every query to ensure that the objects returned fit the * requirements of the query. */ public class MapPredicateTest { private enum Operation { PREDICATE_BUILDER, SQL_STRING, PAGING_PREDICATE, UPDATE_EMPLOYEE, DESTROY_MAP } private static final ILogger LOGGER = Logger.getLogger(MapPredicateTest.class); public String basename = MapPredicateTest.class.getSimpleName(); public int threadCount = 3; public int keyCount = 100; public int pageSize = 5; public double predicateBuilderProb = 0.2; public double sqlStringProb = 0.2; public double pagePredicateProb = 0.2; public double updateEmployeeProb = 0.3; public double destroyProb = 0.1; private final OperationSelectorBuilder<Operation> operationSelectorBuilder = new OperationSelectorBuilder<Operation>(); private IMap<Integer, Employee> map; private IList<PredicateOperationCounter> operationCounterList; @Setup public void setUp(TestContext testContext) throws Exception { HazelcastInstance targetInstance = testContext.getTargetInstance(); map = targetInstance.getMap(basename); operationCounterList = targetInstance.getList(basename + "OperationCounter"); operationSelectorBuilder .addOperation(Operation.PREDICATE_BUILDER, predicateBuilderProb) .addOperation(Operation.SQL_STRING, sqlStringProb) .addOperation(Operation.PAGING_PREDICATE, pagePredicateProb) .addOperation(Operation.UPDATE_EMPLOYEE, updateEmployeeProb) .addOperation(Operation.DESTROY_MAP, destroyProb); } @Warmup(global = true) public void globalWarmup() { initMap(); } private void initMap() { MapStreamer<Integer, Employee> streamer = MapStreamerFactory.getInstance(map); for (int i = 0; i < keyCount; i++) { Employee employee = new Employee(i); streamer.pushEntry(employee.getId(), employee); } streamer.await(); } @Verify(global = true) public void globalVerify() throws Exception { PredicateOperationCounter total = new PredicateOperationCounter(); for (PredicateOperationCounter operationCounter : operationCounterList) { total.add(operationCounter); } LOGGER.info(format("Operation counters from %s: %s", basename, total)); } @RunWithWorker public Worker createWorker() { return new Worker(); } private class Worker extends AbstractWorker<Operation> { private final PredicateOperationCounter operationCounter = new PredicateOperationCounter(); private long lastUpdateMs = System.currentTimeMillis(); private long iterationsLastMinute = 0; private long maxLastMinute = Long.MIN_VALUE; private long minLastMinute = Long.MAX_VALUE; private long spendTimeMs = 0; public Worker() { super(operationSelectorBuilder); LOGGER.info("Starting worker: " + this + " for " + MapPredicateTest.class.getSimpleName()); } @Override public void timeStep(Operation operation) { long startMs = System.currentTimeMillis(); switch (operation) { case PREDICATE_BUILDER: predicateBuilder(); break; case SQL_STRING: sqlString(); break; case PAGING_PREDICATE: pagingPredicate(); break; case UPDATE_EMPLOYEE: updateEmployee(); break; case DESTROY_MAP: destroyMap(); break; default: throw new UnsupportedOperationException(); } long nowMs = System.currentTimeMillis(); long durationMs = nowMs - startMs; maxLastMinute = Math.max(durationMs, maxLastMinute); minLastMinute = Math.min(durationMs, 
minLastMinute); iterationsLastMinute++; spendTimeMs += durationMs; if (lastUpdateMs + SECONDS.toMillis(60) < nowMs) { double avg = spendTimeMs / (double) iterationsLastMinute; double perf = (iterationsLastMinute * 1000d) / (double) spendTimeMs; LOGGER.info( format( "last minute: iterations=%d, min=%d ms, max=%d ms, avg=%.2f ms, perf=%.2f predicates/second", iterationsLastMinute, minLastMinute, maxLastMinute, avg, perf)); maxLastMinute = Long.MIN_VALUE; minLastMinute = Long.MAX_VALUE; iterationsLastMinute = 0; lastUpdateMs = nowMs; } } @Override protected void afterRun() { operationCounterList.add(operationCounter); } private void predicateBuilder() { int age = randomInt(Employee.MAX_AGE); String name = Employee.getRandomName(); // TODO: Still broken because it relies on reflection which is dog slow, so we need an // explicit AgeNamePredicate EntryObject entryObject = new PredicateBuilder().getEntryObject(); Predicate agePredicate = entryObject.get("age").lessThan(age); Predicate ageNamePredicate = entryObject.get("name").equal(name).and(agePredicate); Collection<Employee> employees = map.values(ageNamePredicate); for (Employee emp : employees) { assertTrue(basename + ": " + emp + " not matching " + ageNamePredicate, emp.getAge() < age); assertTrue( basename + ": " + emp + " not matching " + ageNamePredicate, emp.getName().equals(name)); } operationCounter.predicateBuilderCount++; } private void sqlString() { boolean active = getRandom().nextBoolean(); int age = randomInt(Employee.MAX_AGE); SqlPredicate predicate = new SqlPredicate("active=" + active + " AND age >" + age); Collection<Employee> employees = map.values(predicate); for (Employee emp : employees) { assertTrue(basename + ": " + emp + " not matching " + predicate, emp.isActive() == active); assertTrue(basename + ": " + emp + " not matching " + predicate, emp.getAge() > age); } operationCounter.sqlStringCount++; } private void pagingPredicate() { double maxSal = getRandom().nextDouble() * Employee.MAX_SALARY; Predicate predicate = Predicates.lessThan("salary", maxSal); PagingPredicate pagingPredicate = new PagingPredicate(predicate, pageSize); Collection<Employee> employees; do { employees = map.values(pagingPredicate); for (Employee emp : employees) { assertTrue( basename + ": " + emp + " not matching " + predicate, emp.getSalary() < maxSal); } pagingPredicate.nextPage(); } while (!employees.isEmpty()); operationCounter.pagePredicateCount++; } private void updateEmployee() { Integer key = randomInt(keyCount); Employee employee = map.get(key); if (employee != null) { employee.randomizeProperties(); map.put(key, employee); operationCounter.updateEmployeeCount++; } } private void destroyMap() { map.destroy(); initMap(); operationCounter.destroyCount++; } } }
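// Illustrative sketch only: a standalone run of the three predicate styles the test above exercises
// (PredicateBuilder, SqlPredicate, PagingPredicate) against a small IMap on one embedded member.
// The Person value type and all object names are assumptions for this example; the test itself uses
// its own Employee type and a full simulator cluster.
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.query.EntryObject;
import com.hazelcast.query.PagingPredicate;
import com.hazelcast.query.Predicate;
import com.hazelcast.query.PredicateBuilder;
import com.hazelcast.query.Predicates;
import com.hazelcast.query.SqlPredicate;
import java.io.Serializable;
import java.util.Collection;

public class PredicateStylesExample {

    public static class Person implements Serializable, Comparable<Person> {
        private final String name;
        private final int age;
        private final double salary;

        public Person(String name, int age, double salary) {
            this.name = name;
            this.age = age;
            this.salary = salary;
        }

        public String getName() { return name; }
        public int getAge() { return age; }
        public double getSalary() { return salary; }

        // Natural ordering lets PagingPredicate sort values when no comparator is supplied.
        public int compareTo(Person other) { return Double.compare(salary, other.salary); }

        public String toString() { return name + "/" + age + "/" + salary; }
    }

    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        IMap<Integer, Person> map = hz.getMap("people");
        map.put(1, new Person("joe", 25, 1000d));
        map.put(2, new Person("ann", 35, 2000d));
        map.put(3, new Person("joe", 45, 3000d));

        // 1) PredicateBuilder: name == "joe" AND age < 40 (reflection-based, as the TODO above notes).
        EntryObject e = new PredicateBuilder().getEntryObject();
        Predicate builderPredicate = e.get("name").equal("joe").and(e.get("age").lessThan(40));
        System.out.println("builder: " + map.values(builderPredicate));

        // 2) SqlPredicate: the same filter expressed as a SQL-like string.
        Collection<Person> bySql = map.values(new SqlPredicate("name = 'joe' AND age < 40"));
        System.out.println("sql: " + bySql);

        // 3) PagingPredicate: iterate salary < 2500 one page (of size 1) at a time.
        PagingPredicate paging = new PagingPredicate(Predicates.lessThan("salary", 2500d), 1);
        Collection<Person> page;
        do {
            page = map.values(paging);
            System.out.println("page: " + page);
            paging.nextPage();
        } while (!page.isEmpty());

        Hazelcast.shutdownAll();
    }
}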
public class MapDataIntegrityTest { private static final ILogger LOGGER = Logger.getLogger(MapDataIntegrityTest.class); // properties public int mapIntegrityThreadCount = 8; public int stressThreadCount = 8; public int totalIntegrityKeys = 10000; public int totalStressKeys = 1000; public int valueSize = 1000; public boolean mapLoad = true; public boolean doRunAsserts = true; public String basename = this.getClass().getCanonicalName(); private TestContext testContext; private HazelcastInstance targetInstance; private String testId; private IMap<Integer, byte[]> integrityMap; private IMap<Integer, byte[]> stressMap; private MapIntegrityThread[] integrityThreads; private byte[] value; @Setup public void setup(TestContext testContext) throws Exception { this.testContext = testContext; targetInstance = testContext.getTargetInstance(); testId = testContext.getTestId(); integrityMap = targetInstance.getMap(basename + "Integrity"); stressMap = targetInstance.getMap(basename + "Stress"); integrityThreads = new MapIntegrityThread[mapIntegrityThreadCount]; value = new byte[valueSize]; Random random = new Random(); random.nextBytes(value); if (mapLoad && isMemberNode(targetInstance)) { PartitionService partitionService = targetInstance.getPartitionService(); final Set<Partition> partitionSet = partitionService.getPartitions(); for (Partition partition : partitionSet) { while (partition.getOwner() == null) { Thread.sleep(1000); } } LOGGER.info(testId + ": " + partitionSet.size() + " partitions"); Member localMember = targetInstance.getCluster().getLocalMember(); for (int i = 0; i < totalIntegrityKeys; i++) { Partition partition = partitionService.getPartition(i); if (localMember.equals(partition.getOwner())) { integrityMap.put(i, value); } } LOGGER.info( testId + ": integrityMap=" + integrityMap.getName() + " size=" + integrityMap.size()); Config config = targetInstance.getConfig(); MapConfig mapConfig = config.getMapConfig(integrityMap.getName()); LOGGER.info(testId + ": " + mapConfig); } } @Verify(global = false) public void verify() throws Exception { if (isMemberNode(targetInstance)) { LOGGER.info(testId + ": cluster size =" + targetInstance.getCluster().getMembers().size()); } LOGGER.info( testId + ": integrityMap=" + integrityMap.getName() + " size=" + integrityMap.size()); int totalErrorCount = 0; int totalNullValueCount = 0; for (MapIntegrityThread integrityThread : integrityThreads) { totalErrorCount += integrityThread.sizeErrorCount; totalNullValueCount += integrityThread.nullValueCount; } LOGGER.info(testId + ": total integrityMapSizeErrorCount=" + totalErrorCount); LOGGER.info(testId + ": total integrityMapNullValueCount=" + totalNullValueCount); assertEquals( testId + ": (verify) integrityMap=" + integrityMap.getName() + " map size ", totalIntegrityKeys, integrityMap.size()); assertEquals(testId + ": (verify) integrityMapSizeErrorCount=", 0, totalErrorCount); assertEquals(testId + ": (verify) integrityMapNullValueCount=", 0, totalNullValueCount); } @Run public void run() { ThreadSpawner spawner = new ThreadSpawner(testContext.getTestId()); for (int i = 0; i < mapIntegrityThreadCount; i++) { integrityThreads[i] = new MapIntegrityThread(); spawner.spawn(integrityThreads[i]); } for (int i = 0; i < stressThreadCount; i++) { spawner.spawn(new StressThread()); } spawner.awaitCompletion(); } private class MapIntegrityThread implements Runnable { private final Random random = new Random(); private int nullValueCount = 0; private int sizeErrorCount = 0; public void run() { while 
(!testContext.isStopped()) { int key = random.nextInt(totalIntegrityKeys); byte[] val = integrityMap.get(key); int actualSize = integrityMap.size(); if (doRunAsserts) { assertNotNull( testId + ": integrityMap=" + integrityMap.getName() + " key " + key + " == null", val); assertEquals( testId + ": integrityMap=" + integrityMap.getName() + " map size ", totalIntegrityKeys, actualSize); } else { if (val == null) { nullValueCount++; } if (actualSize != totalIntegrityKeys) { sizeErrorCount++; } } } } } private class StressThread implements Runnable { private final Random random = new Random(); public void run() { while (!testContext.isStopped()) { int key = random.nextInt(totalStressKeys); stressMap.put(key, value); } } } }
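// Illustrative sketch only: the partition-ownership loading pattern setup() above uses, reduced to a
// standalone program. Each member of a two-member embedded cluster writes only the keys it owns, so
// the combined map should hold keyCount entries (assuming no migration between the two load passes).
// The map name, key count, and value size are assumptions for this example.
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.core.Member;
import com.hazelcast.core.Partition;
import com.hazelcast.core.PartitionService;

public class OwnedKeyLoadingExample {

    public static void main(String[] args) throws Exception {
        HazelcastInstance hz1 = Hazelcast.newHazelcastInstance();
        HazelcastInstance hz2 = Hazelcast.newHazelcastInstance();
        int keyCount = 1000;

        loadOwnedKeys(hz1, keyCount);
        loadOwnedKeys(hz2, keyCount);

        System.out.println("map size = " + hz1.getMap("integrity").size());
        Hazelcast.shutdownAll();
    }

    private static void loadOwnedKeys(HazelcastInstance hz, int keyCount) throws InterruptedException {
        PartitionService partitionService = hz.getPartitionService();
        // Wait until every partition has an owner, as the test does before loading.
        for (Partition partition : partitionService.getPartitions()) {
            while (partition.getOwner() == null) {
                Thread.sleep(100);
            }
        }
        Member localMember = hz.getCluster().getLocalMember();
        IMap<Integer, byte[]> map = hz.getMap("integrity");
        for (int key = 0; key < keyCount; key++) {
            if (localMember.equals(partitionService.getPartition(key).getOwner())) {
                map.put(key, new byte[8]);
            }
        }
    }
}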
/** ClientEndpoints are stored and managed through this class. */ public class ClientEndpointManager { private static final ILogger LOGGER = Logger.getLogger(ClientEndpointManager.class); private static final int DESTROY_ENDPOINT_DELAY_MS = 1111; private final ClientEngineImpl clientEngine; private final NodeEngine nodeEngine; private final ConcurrentMap<Connection, ClientEndpoint> endpoints = new ConcurrentHashMap<Connection, ClientEndpoint>(); public ClientEndpointManager(ClientEngineImpl clientEngine, NodeEngine nodeEngine) { this.clientEngine = clientEngine; this.nodeEngine = nodeEngine; } Set<ClientEndpoint> getEndpoints(String uuid) { Set<ClientEndpoint> endpointSet = new HashSet<ClientEndpoint>(); for (ClientEndpoint endpoint : endpoints.values()) { if (uuid.equals(endpoint.getUuid())) { endpointSet.add(endpoint); } } return endpointSet; } ClientEndpoint getEndpoint(Connection conn) { return endpoints.get(conn); } ClientEndpoint createEndpoint(Connection conn) { if (!conn.live()) { LOGGER.severe("Can't create an endpoint for a dead connection"); return null; } String clientUuid = UuidUtil.createClientUuid(conn.getEndPoint()); ClientEndpoint endpoint = new ClientEndpoint(clientEngine, conn, clientUuid); if (endpoints.putIfAbsent(conn, endpoint) != null) { LOGGER.severe("An endpoint already exists for connection: " + conn); } return endpoint; } void removeEndpoint(final ClientEndpoint endpoint) { removeEndpoint(endpoint, false); } void removeEndpoint(final ClientEndpoint endpoint, boolean closeImmediately) { endpoints.remove(endpoint.getConnection()); LOGGER.info("Destroying " + endpoint); try { endpoint.destroy(); } catch (LoginException e) { LOGGER.warning(e); } final Connection connection = endpoint.getConnection(); if (closeImmediately) { try { connection.close(); } catch (Throwable e) { LOGGER.warning("While closing client connection: " + connection, e); } } else { nodeEngine .getExecutionService() .schedule( new Runnable() { public void run() { if (connection.live()) { try { connection.close(); } catch (Throwable e) { LOGGER.warning("While closing client connection: " + e.toString()); } } } }, DESTROY_ENDPOINT_DELAY_MS, TimeUnit.MILLISECONDS); } clientEngine.sendClientEvent(endpoint); } void removeEndpoints(String memberUuid) { Iterator<ClientEndpoint> iterator = endpoints.values().iterator(); while (iterator.hasNext()) { ClientEndpoint endpoint = iterator.next(); String ownerUuid = endpoint.getPrincipal().getOwnerUuid(); if (memberUuid.equals(ownerUuid)) { iterator.remove(); removeEndpoint(endpoint, true); } } } void clear() { endpoints.clear(); } Collection<ClientEndpoint> getEndpoints() { return endpoints.values(); } int size() { return endpoints.size(); } }
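// Illustrative sketch only: the "remove now, close a bit later" pattern removeEndpoint() above uses,
// shown with a plain ScheduledExecutorService and a toy Connection interface. The names below
// (Connection, DESTROY_DELAY_MS) are stand-ins for this example, not Hazelcast API.
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class DelayedCloseExample {

    interface Connection {
        boolean live();
        void close();
    }

    private static final long DESTROY_DELAY_MS = 1111;

    private final ConcurrentMap<Connection, String> endpoints = new ConcurrentHashMap<Connection, String>();
    private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();

    void removeEndpoint(final Connection connection, boolean closeImmediately) {
        // Stop routing to the endpoint right away...
        endpoints.remove(connection);
        if (closeImmediately) {
            connection.close();
            return;
        }
        // ...but give in-flight traffic a short grace period before tearing the connection down.
        scheduler.schedule(new Runnable() {
            public void run() {
                if (connection.live()) {
                    connection.close();
                }
            }
        }, DESTROY_DELAY_MS, TimeUnit.MILLISECONDS);
    }
}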
abstract class ClientInvocationServiceSupport implements ClientInvocationService, ConnectionHeartbeatListener, ConnectionListener { private static final int WAIT_TIME_FOR_PACKETS_TO_BE_CONSUMED = 10; private static final int WAIT_TIME_FOR_PACKETS_TO_BE_CONSUMED_THRESHOLD = 5000; protected final HazelcastClientInstanceImpl client; protected final ClientConnectionManager connectionManager; protected final ClientPartitionService partitionService; protected final ClientExecutionService executionService; private final ILogger logger = Logger.getLogger(ClientInvocationService.class); private final ResponseThread responseThread; private final ConcurrentMap<Integer, ClientInvocation> callIdMap = new ConcurrentHashMap<Integer, ClientInvocation>(); private final ConcurrentMap<Integer, ClientListenerInvocation> eventHandlerMap = new ConcurrentHashMap<Integer, ClientListenerInvocation>(); private final AtomicInteger callIdIncrementer = new AtomicInteger(); private final ClientExceptionFactory clientExceptionFactory = new ClientExceptionFactory(); private volatile boolean isShutdown; public ClientInvocationServiceSupport(HazelcastClientInstanceImpl client) { this.client = client; this.connectionManager = client.getConnectionManager(); this.executionService = client.getClientExecutionService(); connectionManager.addConnectionListener(this); connectionManager.addConnectionHeartbeatListener(this); this.partitionService = client.getClientPartitionService(); responseThread = new ResponseThread( client.getThreadGroup(), client.getName() + ".response-", client.getClientConfig().getClassLoader()); responseThread.start(); } @Override public boolean isRedoOperation() { return client.getClientConfig().getNetworkConfig().isRedoOperation(); } protected void send(ClientInvocation invocation, ClientConnection connection) throws IOException { if (isShutdown) { throw new HazelcastClientNotActiveException("Client is shut down"); } registerInvocation(invocation); ClientMessage clientMessage = invocation.getClientMessage(); if (!isAllowedToSendRequest(connection, invocation) || !writeToConnection(connection, clientMessage)) { final int callId = clientMessage.getCorrelationId(); ClientInvocation clientInvocation = deRegisterCallId(callId); deRegisterEventHandler(callId); if (clientInvocation != null) { throw new IOException("Packet not send to " + connection.getRemoteEndpoint()); } else { if (logger.isFinestEnabled()) { logger.finest("Invocation not found to deregister for call id " + callId); } } } invocation.setSendConnection(connection); } private boolean writeToConnection(ClientConnection connection, ClientMessage clientMessage) { clientMessage.addFlag(ClientMessage.BEGIN_AND_END_FLAGS); return connection.write(clientMessage); } private boolean isAllowedToSendRequest(ClientConnection connection, ClientInvocation invocation) { if (!connection.isHeartBeating()) { if (invocation.shouldBypassHeartbeatCheck()) { // ping and removeAllListeners should be send even though heart is not beating return true; } if (logger.isFinestEnabled()) { logger.warning( "Connection is not heart-beating, won't write client message -> " + invocation.getClientMessage()); } return false; } return true; } private void registerInvocation(ClientInvocation clientInvocation) { short protocolVersion = client.getProtocolVersion(); final int correlationId = newCorrelationId(); clientInvocation.getClientMessage().setCorrelationId(correlationId).setVersion(protocolVersion); callIdMap.put(correlationId, clientInvocation); if (clientInvocation instanceof 
ClientListenerInvocation) { eventHandlerMap.put(correlationId, (ClientListenerInvocation) clientInvocation); } } private ClientInvocation deRegisterCallId(int callId) { return callIdMap.remove(callId); } private ClientInvocation deRegisterEventHandler(int callId) { return eventHandlerMap.remove(callId); } @Override public EventHandler getEventHandler(int callId) { final ClientListenerInvocation clientInvocation = eventHandlerMap.get(callId); if (clientInvocation == null) { return null; } return clientInvocation.getHandler(); } @Override public boolean removeEventHandler(Integer callId) { if (callId != null) { return eventHandlerMap.remove(callId) != null; } return false; } public void cleanResources( ConstructorFunction<Object, Throwable> responseCtor, ClientConnection connection) { final Iterator<Map.Entry<Integer, ClientInvocation>> iter = callIdMap.entrySet().iterator(); while (iter.hasNext()) { final Map.Entry<Integer, ClientInvocation> entry = iter.next(); final ClientInvocation invocation = entry.getValue(); if (connection.equals(invocation.getSendConnection())) { iter.remove(); invocation.notifyException(responseCtor.createNew(null)); eventHandlerMap.remove(entry.getKey()); } } final Iterator<ClientListenerInvocation> iterator = eventHandlerMap.values().iterator(); while (iterator.hasNext()) { final ClientInvocation invocation = iterator.next(); if (connection.equals(invocation.getSendConnection())) { iterator.remove(); invocation.notifyException(responseCtor.createNew(null)); } } } @Override public void heartBeatStarted(Connection connection) {} @Override public void heartBeatStopped(Connection connection) { ClientMessage request = ClientRemoveAllListenersCodec.encodeRequest(); ClientInvocation removeListenerInvocation = new ClientInvocation(client, request, connection); removeListenerInvocation.setBypassHeartbeatCheck(true); removeListenerInvocation.invoke(); final Address remoteEndpoint = connection.getEndPoint(); final Iterator<ClientListenerInvocation> iterator = eventHandlerMap.values().iterator(); final TargetDisconnectedException response = new TargetDisconnectedException(remoteEndpoint); while (iterator.hasNext()) { final ClientInvocation clientInvocation = iterator.next(); if (clientInvocation.getSendConnection().equals(connection)) { iterator.remove(); clientInvocation.notifyException(response); } } } @Override public void connectionAdded(Connection connection) {} @Override public void connectionRemoved(Connection connection) { cleanConnectionResources((ClientConnection) connection); } @Override public void cleanConnectionResources(ClientConnection connection) { if (connectionManager.isAlive()) { try { executionService.execute(new CleanResourcesTask(connection)); } catch (RejectedExecutionException e) { logger.warning("Execution rejected ", e); } } else { cleanResources( new ConstructorFunction<Object, Throwable>() { @Override public Throwable createNew(Object arg) { return new HazelcastClientNotActiveException("Client is shutting down!"); } }, connection); } } public boolean isShutdown() { return isShutdown; } public void shutdown() { isShutdown = true; responseThread.interrupt(); } private class CleanResourcesTask implements Runnable { private final ClientConnection connection; CleanResourcesTask(ClientConnection connection) { this.connection = connection; } @Override public void run() { waitForPacketsProcessed(); cleanResources( new ConstructorFunction<Object, Throwable>() { @Override public Throwable createNew(Object arg) { return new 
TargetDisconnectedException(connection.getRemoteEndpoint()); } }, connection); } private void waitForPacketsProcessed() { final long begin = System.currentTimeMillis(); int count = connection.getPacketCount(); while (count != 0) { try { Thread.sleep(WAIT_TIME_FOR_PACKETS_TO_BE_CONSUMED); } catch (InterruptedException e) { logger.warning(e); break; } long elapsed = System.currentTimeMillis() - begin; if (elapsed > WAIT_TIME_FOR_PACKETS_TO_BE_CONSUMED_THRESHOLD) { logger.warning("There are packets which are not processed " + count); break; } count = connection.getPacketCount(); } } } @Override public void handleClientMessage(ClientMessage message, Connection connection) { responseThread.workQueue.add(new ClientPacket((ClientConnection) connection, message)); } private static class ClientPacket { private final ClientConnection clientConnection; private final ClientMessage clientMessage; public ClientPacket(ClientConnection clientConnection, ClientMessage clientMessage) { this.clientConnection = clientConnection; this.clientMessage = clientMessage; } public ClientConnection getClientConnection() { return clientConnection; } public ClientMessage getClientMessage() { return clientMessage; } } private class ResponseThread extends Thread { private final BlockingQueue<ClientPacket> workQueue = new LinkedBlockingQueue<ClientPacket>(); public ResponseThread(ThreadGroup threadGroup, String name, ClassLoader classLoader) { super(threadGroup, name); setContextClassLoader(classLoader); } @Override public void run() { try { doRun(); } catch (OutOfMemoryError e) { onOutOfMemory(e); } catch (Throwable t) { logger.severe(t); } } private void doRun() { while (true) { ClientPacket task; try { task = workQueue.take(); } catch (InterruptedException e) { if (isShutdown) { return; } continue; } if (isShutdown) { return; } process(task); } } private void process(ClientPacket packet) { final ClientConnection conn = packet.getClientConnection(); try { handleClientMessage(packet.getClientMessage()); } catch (Exception e) { logger.severe("Failed to process task: " + packet + " on responseThread :" + getName(), e); } finally { conn.decrementPacketCount(); } } private void handleClientMessage(ClientMessage clientMessage) throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException { int correlationId = clientMessage.getCorrelationId(); final ClientInvocation future = deRegisterCallId(correlationId); if (future == null) { logger.warning("No call for callId: " + correlationId + ", response: " + clientMessage); return; } if (ErrorCodec.TYPE == clientMessage.getMessageType()) { ErrorCodec exParameters = ErrorCodec.decode(clientMessage); Throwable exception = clientExceptionFactory.createException( exParameters.errorCode, exParameters.className, exParameters.message, exParameters.stackTrace, exParameters.causeErrorCode, exParameters.causeClassName); future.notifyException(exception); } else { future.notify(clientMessage); } } } private int newCorrelationId() { return callIdIncrementer.incrementAndGet(); } }
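// Illustrative sketch only: the correlation-id bookkeeping used above (register an invocation under a
// fresh id, let a single response thread pull packets off a queue and complete the matching caller),
// reduced to plain JDK types. PendingCall, Response, and the String payloads are example stand-ins.
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;

public class CorrelationDispatchExample {

    static class PendingCall {
        private final CountDownLatch done = new CountDownLatch(1);
        private volatile String response;

        void complete(String response) {
            this.response = response;
            done.countDown();
        }

        String await() throws InterruptedException {
            done.await();
            return response;
        }
    }

    static class Response {
        final int correlationId;
        final String payload;
        Response(int correlationId, String payload) {
            this.correlationId = correlationId;
            this.payload = payload;
        }
    }

    private final AtomicInteger callIdIncrementer = new AtomicInteger();
    private final ConcurrentMap<Integer, PendingCall> callIdMap = new ConcurrentHashMap<Integer, PendingCall>();
    private final BlockingQueue<Response> workQueue = new LinkedBlockingQueue<Response>();

    // Caller side: register the pending call under a new correlation id before "sending".
    int register(PendingCall call) {
        int correlationId = callIdIncrementer.incrementAndGet();
        callIdMap.put(correlationId, call);
        return correlationId;
    }

    // Response side: deregister by correlation id and notify the waiting caller.
    void startResponseThread() {
        Thread responseThread = new Thread(new Runnable() {
            public void run() {
                while (!Thread.currentThread().isInterrupted()) {
                    try {
                        Response response = workQueue.take();
                        PendingCall call = callIdMap.remove(response.correlationId);
                        if (call != null) {
                            call.complete(response.payload);
                        }
                    } catch (InterruptedException e) {
                        return;
                    }
                }
            }
        }, "response-thread");
        responseThread.setDaemon(true);
        responseThread.start();
    }

    public static void main(String[] args) throws InterruptedException {
        CorrelationDispatchExample example = new CorrelationDispatchExample();
        example.startResponseThread();
        PendingCall call = new PendingCall();
        int id = example.register(call);
        example.workQueue.add(new Response(id, "pong"));
        System.out.println(call.await());
    }
}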
public final class HazelcastTestUtils { private static final ILogger LOGGER = Logger.getLogger(HazelcastTestUtils.class); private HazelcastTestUtils() {} public static String getPartitionDistributionInformation(HazelcastInstance hz) { Map<Member, Integer> partitionCountMap = new HashMap<Member, Integer>(); int totalPartitions = 0; for (Partition partition : hz.getPartitionService().getPartitions()) { totalPartitions++; Member member = partition.getOwner(); Integer count = partitionCountMap.get(member); if (count == null) { count = 0; } count++; partitionCountMap.put(member, count); } StringBuilder sb = new StringBuilder(); sb.append("total partitions:").append(totalPartitions).append("\n"); for (Map.Entry<Member, Integer> entry : partitionCountMap.entrySet()) { Member member = entry.getKey(); long count = entry.getValue(); double percentage = count * 100d / totalPartitions; sb.append(member) .append(" total=") .append(count) .append(" percentage=") .append(percentage) .append("%\n"); } return sb.toString(); } public static String getOperationCountInformation(HazelcastInstance hz) { Map<Member, Long> operationCountMap = getOperationCount(hz); long totalOps = 0; for (Long count : operationCountMap.values()) { totalOps += count; } StringBuilder sb = new StringBuilder(); sb.append("total operations: ").append(totalOps).append("\n"); for (Map.Entry<Member, Long> entry : operationCountMap.entrySet()) { Member member = entry.getKey(); long opsOnMember = entry.getValue(); double percentage = opsOnMember * 100d / totalOps; sb.append(member) .append(" operations: ") .append(opsOnMember) .append(" percentage: ") .append(percentage) .append("%\n"); } return sb.toString(); } public static Map<Member, Long> getOperationCount(HazelcastInstance hz) { IExecutorService executorService = hz.getExecutorService("operationCountExecutor"); Map<Member, Future<Long>> futures = new HashMap<Member, Future<Long>>(); for (Member member : hz.getCluster().getMembers()) { Future<Long> future = executorService.submitToMember(new GetOperationCount(), member); futures.put(member, future); } Map<Member, Long> result = new HashMap<Member, Long>(); for (Map.Entry<Member, Future<Long>> entry : futures.entrySet()) { try { Member member = entry.getKey(); Long value = entry.getValue().get(); if (value == null) { value = 0L; } result.put(member, value); } catch (InterruptedException e) { throw new TestException(e); } catch (ExecutionException e) { throw new TestException(e); } } return result; } public static void logPartitionStatistics( ILogger log, String basename, IMap<Object, Integer> map, boolean printSizes) { MapProxyImpl mapProxy = (MapProxyImpl) map; MapService mapService = (MapService) mapProxy.getService(); MapServiceContext mapServiceContext = mapService.getMapServiceContext(); Collection<Integer> localPartitions = mapServiceContext.getOwnedPartitions(); int localSize = 0; StringBuilder partitionIDs = new StringBuilder(); StringBuilder partitionSizes = new StringBuilder(); String separator = ""; for (int partitionId : localPartitions) { int partitionSize = mapServiceContext.getRecordStore(partitionId, map.getName()).size(); localSize += partitionSize; partitionIDs.append(separator).append(partitionId); partitionSizes.append(separator).append(partitionSize); separator = ", "; } log.info( format( "%s: Local partitions (count %d) (size %d) (avg %.2f) (IDs %s)%s", basename, localPartitions.size(), localSize, localSize / (float) localPartitions.size(), partitionIDs.toString(), printSizes ? 
format(" (sizes %s)", partitionSizes.toString()) : "")); } public static final class GetOperationCount implements Callable<Long>, HazelcastInstanceAware, Serializable { private static final long serialVersionUID = 2875034360565495907L; private transient HazelcastInstance hz; @Override public Long call() throws Exception { try { OperationService operationService = HazelcastTestUtils.getOperationService(hz); return operationService.getExecutedOperationCount(); } catch (NoSuchMethodError e) { LOGGER.warning(e); return -1L; } } @Override public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { this.hz = hazelcastInstance; } } public static void waitClusterSize(ILogger logger, HazelcastInstance hz, int clusterSize) throws InterruptedException { for (; ; ) { if (hz.getCluster().getMembers().size() >= clusterSize) { return; } logger.info("waiting cluster == " + clusterSize); Thread.sleep(1000); } } public static OperationService getOperationService(HazelcastInstance hz) { Node node = getNode(hz); if (node == null) { throw new NullPointerException("node is null in Hazelcast instance " + hz); } NodeEngineImpl nodeEngine = node.getNodeEngine(); try { return nodeEngine.getOperationService(); } catch (NoSuchMethodError e) { // fallback for a binary incompatible change (see commit http://git.io/vtfKU) return getOperationServiceViaReflection(nodeEngine); } } private static OperationService getOperationServiceViaReflection(NodeEngineImpl nodeEngine) { try { Method method = NodeEngineImpl.class.getMethod("getOperationService"); return (OperationService) method.invoke(nodeEngine); } catch (NoSuchMethodException e) { throw new IllegalStateException(e); } catch (InvocationTargetException e) { throw new IllegalStateException(e); } catch (IllegalAccessException e) { throw new IllegalStateException(e); } } public static Node getNode(HazelcastInstance hz) { HazelcastInstanceImpl impl = getHazelcastInstanceImpl(hz); return impl != null ? impl.node : null; } private static HazelcastInstanceImpl getHazelcastInstanceImpl(HazelcastInstance hz) { HazelcastInstanceImpl impl = null; if (hz instanceof HazelcastInstanceProxy) { return getObjectFromField(hz, "original"); } else if (hz instanceof HazelcastInstanceImpl) { impl = (HazelcastInstanceImpl) hz; } return impl; } /** * Returns the next {@code long} key owned by the given Hazelcast instance. * * @param instance Hazelcast instance to search next key for * @param lastKey last key to start search from * @return next key owned by given Hazelcast instance */ public static long nextKeyOwnedBy(HazelcastInstance instance, long lastKey) { Member localMember = instance.getCluster().getLocalMember(); PartitionService partitionService = instance.getPartitionService(); while (true) { Partition partition = partitionService.getPartition(lastKey); if (localMember.equals(partition.getOwner())) { return lastKey; } lastKey++; } } public static boolean isMemberNode(HazelcastInstance instance) { return instance instanceof HazelcastInstanceProxy; } public static boolean isClient(HazelcastInstance instance) { return !isMemberNode(instance); } public static void failOnVersionMismatch(String minVersion, String message) { BuildInfo buildInfo = BuildInfoProvider.getBuildInfo(); String actualVersion = buildInfo.getVersion(); LOGGER.info( "Compare version " + actualVersion + " with minimum version " + minVersion + ": " + isMinVersion(minVersion, actualVersion)); if (!isMinVersion(minVersion, actualVersion)) { fail(format(message, minVersion)); } } }
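// Illustrative sketch only: one way the utilities above can be driven from a standalone program.
// The two embedded members, the class name, and the starting key are assumptions for this example.
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;

public class HazelcastTestUtilsExample {

    public static void main(String[] args) throws InterruptedException {
        ILogger logger = Logger.getLogger(HazelcastTestUtilsExample.class);
        HazelcastInstance hz1 = Hazelcast.newHazelcastInstance();
        HazelcastInstance hz2 = Hazelcast.newHazelcastInstance();

        // Block until both members have joined, then inspect how partitions are spread.
        HazelcastTestUtils.waitClusterSize(logger, hz1, 2);
        logger.info(HazelcastTestUtils.getPartitionDistributionInformation(hz1));

        // Find a key owned by the first member, starting the search at 0.
        long key = HazelcastTestUtils.nextKeyOwnedBy(hz1, 0);
        logger.info("first key owned by hz1: " + key);

        Hazelcast.shutdownAll();
    }
}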
/** * Contains all the configuration to start a {@link com.hazelcast.core.HazelcastInstance}. A Config * can be created programmatically, but can also be configured using XML, see {@link * com.hazelcast.config.XmlConfigBuilder}. * * <p>Config instances can be shared between threads, but should not be modified after they are used * to create HazelcastInstances. */ @SuppressWarnings("checkstyle:classfanoutcomplexity") public class Config { private static final ILogger LOGGER = Logger.getLogger(Config.class); private URL configurationUrl; private File configurationFile; private ClassLoader classLoader; private Properties properties = new Properties(); private String instanceName; private GroupConfig groupConfig = new GroupConfig(); private NetworkConfig networkConfig = new NetworkConfig(); private ConfigPatternMatcher configPatternMatcher = new MatchingPointConfigPatternMatcher(); private final Map<String, MapConfig> mapConfigs = new ConcurrentHashMap<String, MapConfig>(); private final Map<String, CacheSimpleConfig> cacheConfigs = new ConcurrentHashMap<String, CacheSimpleConfig>(); private final Map<String, TopicConfig> topicConfigs = new ConcurrentHashMap<String, TopicConfig>(); private final Map<String, ReliableTopicConfig> reliableTopicConfigs = new ConcurrentHashMap<String, ReliableTopicConfig>(); private final Map<String, QueueConfig> queueConfigs = new ConcurrentHashMap<String, QueueConfig>(); private final Map<String, MultiMapConfig> multiMapConfigs = new ConcurrentHashMap<String, MultiMapConfig>(); private final Map<String, ListConfig> listConfigs = new ConcurrentHashMap<String, ListConfig>(); private final Map<String, SetConfig> setConfigs = new ConcurrentHashMap<String, SetConfig>(); private final Map<String, ExecutorConfig> executorConfigs = new ConcurrentHashMap<String, ExecutorConfig>(); private final Map<String, SemaphoreConfig> semaphoreConfigs = new ConcurrentHashMap<String, SemaphoreConfig>(); private final Map<String, ReplicatedMapConfig> replicatedMapConfigs = new ConcurrentHashMap<String, ReplicatedMapConfig>(); private final Map<String, WanReplicationConfig> wanReplicationConfigs = new ConcurrentHashMap<String, WanReplicationConfig>(); private final Map<String, JobTrackerConfig> jobTrackerConfigs = new ConcurrentHashMap<String, JobTrackerConfig>(); private final Map<String, QuorumConfig> quorumConfigs = new ConcurrentHashMap<String, QuorumConfig>(); private final Map<String, RingbufferConfig> ringbufferConfigs = new ConcurrentHashMap<String, RingbufferConfig>(); private ServicesConfig servicesConfig = new ServicesConfig(); private SecurityConfig securityConfig = new SecurityConfig(); private final List<ListenerConfig> listenerConfigs = new LinkedList<ListenerConfig>(); private PartitionGroupConfig partitionGroupConfig = new PartitionGroupConfig(); private ManagementCenterConfig managementCenterConfig = new ManagementCenterConfig(); private SerializationConfig serializationConfig = new SerializationConfig(); private ManagedContext managedContext; private ConcurrentMap<String, Object> userContext = new ConcurrentHashMap<String, Object>(); private MemberAttributeConfig memberAttributeConfig = new MemberAttributeConfig(); private NativeMemoryConfig nativeMemoryConfig = new NativeMemoryConfig(); private HotRestartPersistenceConfig hotRestartPersistenceConfig = new HotRestartPersistenceConfig(); private String licenseKey; private boolean liteMember; public Config() {} public Config(String instanceName) { this.instanceName = instanceName; } /** * Returns the class-loader that 
will be used in serialization. * * <p>If null, then thread context class-loader will be used instead. * * @return the class-loader */ public ClassLoader getClassLoader() { return classLoader; } /** * Sets the class-loader to be used during de-serialization and as context class-loader of * Hazelcast internal threads. * * <p> * * <p>If not set (or set to null); thread context class-loader will be used in required places. * * <p> * * <p>Default value is null. * * @param classLoader class-loader to be used during de-serialization * @return Config instance */ public Config setClassLoader(ClassLoader classLoader) { this.classLoader = classLoader; return this; } public ConfigPatternMatcher getConfigPatternMatcher() { return configPatternMatcher; } public void setConfigPatternMatcher(ConfigPatternMatcher configPatternMatcher) { if (configPatternMatcher == null) { throw new IllegalArgumentException("ConfigPatternMatcher is not allowed to be null!"); } this.configPatternMatcher = configPatternMatcher; } /** * Gets a named property already set or from system properties if not exists. * * @param name property name * @return value of the property */ public String getProperty(String name) { String value = properties.getProperty(name); return value != null ? value : System.getProperty(name); } /** * Sets the value of a named property. * * @param name property name * @param value value of the property * @return configured {@link Config} for chaining */ public Config setProperty(String name, String value) { properties.put(name, value); return this; } /** * Gets a {@link HazelcastProperty} already set or from system properties if not exists. * * <p>Deprecated since Hazelcast 3.7, use {@link #getProperty(String)} instead. * * @param property {@link HazelcastProperty} to get * @return value of the property * @deprecated since Hazelcast 3.7 */ @Deprecated public String getProperty(HazelcastProperty property) { return getProperty(property.getName()); } /** * Sets the value of a {@link HazelcastProperty}. * * <p>Deprecated since Hazelcast 3.7, use {@link #setProperty(String, String)} instead. 
* * @param property {@link HazelcastProperty} to set * @param value value of the property * @return configured {@link Config} for chaining * @deprecated since Hazelcast 3.7 */ @Deprecated public Config setProperty(HazelcastProperty property, String value) { return setProperty(property.getName(), value); } public MemberAttributeConfig getMemberAttributeConfig() { return memberAttributeConfig; } public void setMemberAttributeConfig(MemberAttributeConfig memberAttributeConfig) { this.memberAttributeConfig = memberAttributeConfig; } public Properties getProperties() { return properties; } public Config setProperties(Properties properties) { this.properties = properties; return this; } public String getInstanceName() { return instanceName; } public Config setInstanceName(String instanceName) { this.instanceName = instanceName; return this; } public GroupConfig getGroupConfig() { return groupConfig; } public Config setGroupConfig(GroupConfig groupConfig) { this.groupConfig = groupConfig; return this; } public NetworkConfig getNetworkConfig() { return networkConfig; } public Config setNetworkConfig(NetworkConfig networkConfig) { this.networkConfig = networkConfig; return this; } public MapConfig findMapConfig(String name) { String baseName = getBaseName(name); MapConfig config = lookupByPattern(mapConfigs, baseName); if (config != null) { return config.getAsReadOnly(); } return getMapConfig("default").getAsReadOnly(); } public MapConfig getMapConfig(String name) { String baseName = getBaseName(name); MapConfig config = lookupByPattern(mapConfigs, baseName); if (config != null) { return config; } MapConfig defConfig = mapConfigs.get("default"); if (defConfig == null) { defConfig = new MapConfig(); defConfig.setName("default"); addMapConfig(defConfig); } config = new MapConfig(defConfig); config.setName(name); addMapConfig(config); return config; } public Config addMapConfig(MapConfig mapConfig) { mapConfigs.put(mapConfig.getName(), mapConfig); return this; } /** @return the mapConfigs */ public Map<String, MapConfig> getMapConfigs() { return mapConfigs; } /** @param mapConfigs the mapConfigs to set */ public Config setMapConfigs(Map<String, MapConfig> mapConfigs) { this.mapConfigs.clear(); this.mapConfigs.putAll(mapConfigs); for (final Entry<String, MapConfig> entry : this.mapConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); } return this; } public CacheSimpleConfig findCacheConfig(String name) { name = getBaseName(name); return lookupByPattern(cacheConfigs, name); } public CacheSimpleConfig getCacheConfig(String name) { String baseName = getBaseName(name); CacheSimpleConfig config = lookupByPattern(cacheConfigs, baseName); if (config != null) { return config; } CacheSimpleConfig defConfig = cacheConfigs.get("default"); if (defConfig == null) { defConfig = new CacheSimpleConfig(); defConfig.setName("default"); addCacheConfig(defConfig); } config = new CacheSimpleConfig(defConfig); config.setName(name); addCacheConfig(config); return config; } public Config addCacheConfig(CacheSimpleConfig cacheConfig) { cacheConfigs.put(cacheConfig.getName(), cacheConfig); return this; } /** @return the cacheConfigs */ public Map<String, CacheSimpleConfig> getCacheConfigs() { return cacheConfigs; } /** @param cacheConfigs the cacheConfigs to set */ public Config setCacheConfigs(Map<String, CacheSimpleConfig> cacheConfigs) { this.cacheConfigs.clear(); this.cacheConfigs.putAll(cacheConfigs); for (final Entry<String, CacheSimpleConfig> entry : this.cacheConfigs.entrySet()) { 
entry.getValue().setName(entry.getKey()); } return this; } public QueueConfig findQueueConfig(String name) { String baseName = getBaseName(name); QueueConfig config = lookupByPattern(queueConfigs, baseName); if (config != null) { return config.getAsReadOnly(); } return getQueueConfig("default").getAsReadOnly(); } public QueueConfig getQueueConfig(String name) { String baseName = getBaseName(name); QueueConfig config = lookupByPattern(queueConfigs, baseName); if (config != null) { return config; } QueueConfig defConfig = queueConfigs.get("default"); if (defConfig == null) { defConfig = new QueueConfig(); defConfig.setName("default"); addQueueConfig(defConfig); } config = new QueueConfig(defConfig); config.setName(name); addQueueConfig(config); return config; } public Config addQueueConfig(QueueConfig queueConfig) { queueConfigs.put(queueConfig.getName(), queueConfig); return this; } public Map<String, QueueConfig> getQueueConfigs() { return queueConfigs; } public Config setQueueConfigs(Map<String, QueueConfig> queueConfigs) { this.queueConfigs.clear(); this.queueConfigs.putAll(queueConfigs); for (Entry<String, QueueConfig> entry : queueConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); } return this; } public ListConfig findListConfig(String name) { String baseName = getBaseName(name); ListConfig config = lookupByPattern(listConfigs, baseName); if (config != null) { return config.getAsReadOnly(); } return getListConfig("default").getAsReadOnly(); } public ListConfig getListConfig(String name) { String baseName = getBaseName(name); ListConfig config = lookupByPattern(listConfigs, baseName); if (config != null) { return config; } ListConfig defConfig = listConfigs.get("default"); if (defConfig == null) { defConfig = new ListConfig(); defConfig.setName("default"); addListConfig(defConfig); } config = new ListConfig(defConfig); config.setName(name); addListConfig(config); return config; } public Config addListConfig(ListConfig listConfig) { listConfigs.put(listConfig.getName(), listConfig); return this; } public Map<String, ListConfig> getListConfigs() { return listConfigs; } public Config setListConfigs(Map<String, ListConfig> listConfigs) { this.listConfigs.clear(); this.listConfigs.putAll(listConfigs); for (Entry<String, ListConfig> entry : listConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); } return this; } public SetConfig findSetConfig(String name) { String baseName = getBaseName(name); SetConfig config = lookupByPattern(setConfigs, baseName); if (config != null) { return config.getAsReadOnly(); } return getSetConfig("default").getAsReadOnly(); } public SetConfig getSetConfig(String name) { String baseName = getBaseName(name); SetConfig config = lookupByPattern(setConfigs, baseName); if (config != null) { return config; } SetConfig defConfig = setConfigs.get("default"); if (defConfig == null) { defConfig = new SetConfig(); defConfig.setName("default"); addSetConfig(defConfig); } config = new SetConfig(defConfig); config.setName(name); addSetConfig(config); return config; } public Config addSetConfig(SetConfig setConfig) { setConfigs.put(setConfig.getName(), setConfig); return this; } public Map<String, SetConfig> getSetConfigs() { return setConfigs; } public Config setSetConfigs(Map<String, SetConfig> setConfigs) { this.setConfigs.clear(); this.setConfigs.putAll(setConfigs); for (Entry<String, SetConfig> entry : setConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); } return this; } public MultiMapConfig findMultiMapConfig(String name) { String 
baseName = getBaseName(name); MultiMapConfig config = lookupByPattern(multiMapConfigs, baseName); if (config != null) { return config.getAsReadOnly(); } return getMultiMapConfig("default").getAsReadOnly(); } public MultiMapConfig getMultiMapConfig(String name) { String baseName = getBaseName(name); MultiMapConfig config = lookupByPattern(multiMapConfigs, baseName); if (config != null) { return config; } MultiMapConfig defConfig = multiMapConfigs.get("default"); if (defConfig == null) { defConfig = new MultiMapConfig(); defConfig.setName("default"); addMultiMapConfig(defConfig); } config = new MultiMapConfig(defConfig); config.setName(name); addMultiMapConfig(config); return config; } public Config addMultiMapConfig(MultiMapConfig multiMapConfig) { multiMapConfigs.put(multiMapConfig.getName(), multiMapConfig); return this; } public Map<String, MultiMapConfig> getMultiMapConfigs() { return multiMapConfigs; } public Config setMultiMapConfigs(Map<String, MultiMapConfig> multiMapConfigs) { this.multiMapConfigs.clear(); this.multiMapConfigs.putAll(multiMapConfigs); for (final Entry<String, MultiMapConfig> entry : this.multiMapConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); } return this; } public ReplicatedMapConfig findReplicatedMapConfig(String name) { ReplicatedMapConfig config = lookupByPattern(replicatedMapConfigs, name); if (config != null) { return config.getAsReadOnly(); } return getReplicatedMapConfig("default").getAsReadOnly(); } public ReplicatedMapConfig getReplicatedMapConfig(String name) { ReplicatedMapConfig config = lookupByPattern(replicatedMapConfigs, name); if (config != null) { return config; } ReplicatedMapConfig defConfig = replicatedMapConfigs.get("default"); if (defConfig == null) { defConfig = new ReplicatedMapConfig(); defConfig.setName("default"); addReplicatedMapConfig(defConfig); } config = new ReplicatedMapConfig(defConfig); config.setName(name); addReplicatedMapConfig(config); return config; } public Config addReplicatedMapConfig(ReplicatedMapConfig replicatedMapConfig) { replicatedMapConfigs.put(replicatedMapConfig.getName(), replicatedMapConfig); return this; } public Map<String, ReplicatedMapConfig> getReplicatedMapConfigs() { return replicatedMapConfigs; } public Config setReplicatedMapConfigs(Map<String, ReplicatedMapConfig> replicatedMapConfigs) { this.replicatedMapConfigs.clear(); this.replicatedMapConfigs.putAll(replicatedMapConfigs); for (final Entry<String, ReplicatedMapConfig> entry : this.replicatedMapConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); } return this; } public RingbufferConfig findRingbufferConfig(String name) { String baseName = getBaseName(name); RingbufferConfig config = lookupByPattern(ringbufferConfigs, baseName); if (config != null) { return config.getAsReadOnly(); } return getRingbufferConfig("default").getAsReadOnly(); } public RingbufferConfig getRingbufferConfig(String name) { String baseName = getBaseName(name); RingbufferConfig config = lookupByPattern(ringbufferConfigs, baseName); if (config != null) { return config; } RingbufferConfig defConfig = ringbufferConfigs.get("default"); if (defConfig == null) { defConfig = new RingbufferConfig("default"); addRingBufferConfig(defConfig); } config = new RingbufferConfig(name, defConfig); addRingBufferConfig(config); return config; } public Config addRingBufferConfig(RingbufferConfig ringbufferConfig) { ringbufferConfigs.put(ringbufferConfig.getName(), ringbufferConfig); return this; } public Map<String, RingbufferConfig> getRingbufferConfigs() { return 
ringbufferConfigs; } public TopicConfig findTopicConfig(String name) { String baseName = getBaseName(name); TopicConfig config = lookupByPattern(topicConfigs, baseName); if (config != null) { return config.getAsReadOnly(); } return getTopicConfig("default").getAsReadOnly(); } public TopicConfig getTopicConfig(String name) { String baseName = getBaseName(name); TopicConfig config = lookupByPattern(topicConfigs, baseName); if (config != null) { return config; } TopicConfig defConfig = topicConfigs.get("default"); if (defConfig == null) { defConfig = new TopicConfig(); defConfig.setName("default"); addTopicConfig(defConfig); } config = new TopicConfig(defConfig); config.setName(name); addTopicConfig(config); return config; } public Config addTopicConfig(TopicConfig topicConfig) { topicConfigs.put(topicConfig.getName(), topicConfig); return this; } public ReliableTopicConfig findReliableTopicConfig(String name) { String baseName = getBaseName(name); ReliableTopicConfig config = lookupByPattern(reliableTopicConfigs, baseName); if (config != null) { return config.getAsReadOnly(); } return getReliableTopicConfig("default").getAsReadOnly(); } public ReliableTopicConfig getReliableTopicConfig(String name) { String baseName = getBaseName(name); ReliableTopicConfig config = lookupByPattern(reliableTopicConfigs, baseName); if (config != null) { return config; } ReliableTopicConfig defConfig = reliableTopicConfigs.get("default"); if (defConfig == null) { defConfig = new ReliableTopicConfig("default"); addReliableTopicConfig(defConfig); } config = new ReliableTopicConfig(defConfig, name); addReliableTopicConfig(config); return config; } /** @return the reliable topic configs */ public Map<String, ReliableTopicConfig> getReliableTopicConfigs() { return reliableTopicConfigs; } public Config addReliableTopicConfig(ReliableTopicConfig topicConfig) { reliableTopicConfigs.put(topicConfig.getName(), topicConfig); return this; } /** @return the topicConfigs */ public Map<String, TopicConfig> getTopicConfigs() { return topicConfigs; } /** @param mapTopicConfigs the topicConfigs to set */ public Config setTopicConfigs(Map<String, TopicConfig> mapTopicConfigs) { this.topicConfigs.clear(); this.topicConfigs.putAll(mapTopicConfigs); for (final Entry<String, TopicConfig> entry : this.topicConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); } return this; } public ExecutorConfig findExecutorConfig(String name) { String baseName = getBaseName(name); ExecutorConfig config = lookupByPattern(executorConfigs, baseName); if (config != null) { return config.getAsReadOnly(); } return getExecutorConfig("default").getAsReadOnly(); } /** * Returns the ExecutorConfig for the given name * * @param name name of the executor config * @return ExecutorConfig */ public ExecutorConfig getExecutorConfig(String name) { String baseName = getBaseName(name); ExecutorConfig config = lookupByPattern(executorConfigs, baseName); if (config != null) { return config; } ExecutorConfig defConfig = executorConfigs.get("default"); if (defConfig == null) { defConfig = new ExecutorConfig(); defConfig.setName("default"); addExecutorConfig(defConfig); } config = new ExecutorConfig(defConfig); config.setName(name); addExecutorConfig(config); return config; } /** * Adds a new ExecutorConfig by name * * @param executorConfig executor config to add * @return this config instance */ public Config addExecutorConfig(ExecutorConfig executorConfig) { this.executorConfigs.put(executorConfig.getName(), executorConfig); return this; } public Map<String, 
ExecutorConfig> getExecutorConfigs() { return executorConfigs; } public Config setExecutorConfigs(Map<String, ExecutorConfig> executorConfigs) { this.executorConfigs.clear(); this.executorConfigs.putAll(executorConfigs); for (Entry<String, ExecutorConfig> entry : executorConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); } return this; } public SemaphoreConfig findSemaphoreConfig(String name) { String baseName = getBaseName(name); SemaphoreConfig config = lookupByPattern(semaphoreConfigs, baseName); if (config != null) { return config.getAsReadOnly(); } return getSemaphoreConfig("default").getAsReadOnly(); } /** * Returns the SemaphoreConfig for the given name * * @param name name of the semaphore config * @return SemaphoreConfig */ public SemaphoreConfig getSemaphoreConfig(String name) { String baseName = getBaseName(name); SemaphoreConfig config = lookupByPattern(semaphoreConfigs, baseName); if (config != null) { return config; } SemaphoreConfig defConfig = semaphoreConfigs.get("default"); if (defConfig == null) { defConfig = new SemaphoreConfig(); defConfig.setName("default"); addSemaphoreConfig(defConfig); } config = new SemaphoreConfig(defConfig); config.setName(name); addSemaphoreConfig(config); return config; } /** * Adds a new SemaphoreConfig by name * * @param semaphoreConfig semaphore config to add * @return this config instance */ public Config addSemaphoreConfig(SemaphoreConfig semaphoreConfig) { this.semaphoreConfigs.put(semaphoreConfig.getName(), semaphoreConfig); return this; } /** * Returns the collection of semaphore configs. * * @return collection of semaphore configs. */ public Collection<SemaphoreConfig> getSemaphoreConfigs() { return semaphoreConfigs.values(); } public Config setSemaphoreConfigs(Map<String, SemaphoreConfig> semaphoreConfigs) { this.semaphoreConfigs.clear(); this.semaphoreConfigs.putAll(semaphoreConfigs); for (final Entry<String, SemaphoreConfig> entry : this.semaphoreConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); } return this; } public WanReplicationConfig getWanReplicationConfig(String name) { return wanReplicationConfigs.get(name); } public Config addWanReplicationConfig(WanReplicationConfig wanReplicationConfig) { wanReplicationConfigs.put(wanReplicationConfig.getName(), wanReplicationConfig); return this; } public Map<String, WanReplicationConfig> getWanReplicationConfigs() { return wanReplicationConfigs; } public Config setWanReplicationConfigs(Map<String, WanReplicationConfig> wanReplicationConfigs) { this.wanReplicationConfigs.clear(); this.wanReplicationConfigs.putAll(wanReplicationConfigs); return this; } public JobTrackerConfig findJobTrackerConfig(String name) { String baseName = getBaseName(name); JobTrackerConfig config = lookupByPattern(jobTrackerConfigs, baseName); if (config != null) { return config.getAsReadOnly(); } return getJobTrackerConfig(name); } public JobTrackerConfig getJobTrackerConfig(String name) { String baseName = getBaseName(name); JobTrackerConfig config = lookupByPattern(jobTrackerConfigs, baseName); if (config != null) { return config; } JobTrackerConfig defConfig = jobTrackerConfigs.get("default"); if (defConfig == null) { defConfig = new JobTrackerConfig(); defConfig.setName("default"); addJobTrackerConfig(defConfig); } config = new JobTrackerConfig(defConfig); config.setName(name); addJobTrackerConfig(config); return config; } public Config addJobTrackerConfig(JobTrackerConfig jobTrackerConfig) { jobTrackerConfigs.put(jobTrackerConfig.getName(), jobTrackerConfig); return this; } 
public Map<String, JobTrackerConfig> getJobTrackerConfigs() { return jobTrackerConfigs; } public Config setJobTrackerConfigs(Map<String, JobTrackerConfig> jobTrackerConfigs) { this.jobTrackerConfigs.clear(); this.jobTrackerConfigs.putAll(jobTrackerConfigs); for (final Entry<String, JobTrackerConfig> entry : this.jobTrackerConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); } return this; } public Map<String, QuorumConfig> getQuorumConfigs() { return quorumConfigs; } public QuorumConfig getQuorumConfig(String name) { String baseName = getBaseName(name); QuorumConfig config = lookupByPattern(quorumConfigs, baseName); if (config != null) { return config; } QuorumConfig defConfig = quorumConfigs.get("default"); if (defConfig == null) { defConfig = new QuorumConfig(); defConfig.setName("default"); addQuorumConfig(defConfig); } config = new QuorumConfig(defConfig); config.setName(name); addQuorumConfig(config); return config; } public QuorumConfig findQuorumConfig(String name) { String baseName = getBaseName(name); QuorumConfig config = lookupByPattern(quorumConfigs, baseName); if (config != null) { return config; } return getQuorumConfig("default"); } public Config setQuorumConfigs(Map<String, QuorumConfig> quorumConfigs) { this.quorumConfigs.clear(); this.quorumConfigs.putAll(quorumConfigs); for (final Entry<String, QuorumConfig> entry : this.quorumConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); } return this; } public Config addQuorumConfig(QuorumConfig quorumConfig) { quorumConfigs.put(quorumConfig.getName(), quorumConfig); return this; } public ManagementCenterConfig getManagementCenterConfig() { return managementCenterConfig; } public Config setManagementCenterConfig(ManagementCenterConfig managementCenterConfig) { this.managementCenterConfig = managementCenterConfig; return this; } public ServicesConfig getServicesConfig() { return servicesConfig; } public Config setServicesConfig(ServicesConfig servicesConfig) { this.servicesConfig = servicesConfig; return this; } public SecurityConfig getSecurityConfig() { return securityConfig; } public Config setSecurityConfig(SecurityConfig securityConfig) { this.securityConfig = securityConfig; return this; } public Config addListenerConfig(ListenerConfig listenerConfig) { getListenerConfigs().add(listenerConfig); return this; } public List<ListenerConfig> getListenerConfigs() { return listenerConfigs; } public Config setListenerConfigs(List<ListenerConfig> listenerConfigs) { this.listenerConfigs.clear(); this.listenerConfigs.addAll(listenerConfigs); return this; } public SerializationConfig getSerializationConfig() { return serializationConfig; } public Config setSerializationConfig(SerializationConfig serializationConfig) { this.serializationConfig = serializationConfig; return this; } public PartitionGroupConfig getPartitionGroupConfig() { return partitionGroupConfig; } public Config setPartitionGroupConfig(PartitionGroupConfig partitionGroupConfig) { this.partitionGroupConfig = partitionGroupConfig; return this; } /** * Returns hot restart configuration for this member * * @return hot restart configuration */ public HotRestartPersistenceConfig getHotRestartPersistenceConfig() { return hotRestartPersistenceConfig; } /** * Sets hot restart configuration. 
* * @param hrConfig hot restart configuration * @return Config */ public Config setHotRestartPersistenceConfig(HotRestartPersistenceConfig hrConfig) { checkNotNull(hrConfig, "Hot restart config cannot be null!"); this.hotRestartPersistenceConfig = hrConfig; return this; } public ManagedContext getManagedContext() { return managedContext; } public Config setManagedContext(final ManagedContext managedContext) { this.managedContext = managedContext; return this; } public ConcurrentMap<String, Object> getUserContext() { return userContext; } public Config setUserContext(ConcurrentMap<String, Object> userContext) { if (userContext == null) { throw new IllegalArgumentException("userContext can't be null"); } this.userContext = userContext; return this; } public NativeMemoryConfig getNativeMemoryConfig() { return nativeMemoryConfig; } public Config setNativeMemoryConfig(NativeMemoryConfig nativeMemoryConfig) { this.nativeMemoryConfig = nativeMemoryConfig; return this; } /** @return the configurationUrl */ public URL getConfigurationUrl() { return configurationUrl; } /** @param configurationUrl the configurationUrl to set */ public Config setConfigurationUrl(URL configurationUrl) { this.configurationUrl = configurationUrl; return this; } /** @return the configurationFile */ public File getConfigurationFile() { return configurationFile; } /** @param configurationFile the configurationFile to set */ public Config setConfigurationFile(File configurationFile) { this.configurationFile = configurationFile; return this; } public String getLicenseKey() { return licenseKey; } public Config setLicenseKey(final String licenseKey) { this.licenseKey = licenseKey; return this; } /** * @return indicates if the node is a lite member or not. Lite members do not own any partition. */ public boolean isLiteMember() { return liteMember; } /** * @param liteMember sets if the node will be a lite member or not. Lite members do not own any * partition. */ public Config setLiteMember(boolean liteMember) { this.liteMember = liteMember; return this; } private <T> T lookupByPattern(Map<String, T> configPatterns, String itemName) { T candidate = configPatterns.get(itemName); if (candidate != null) { return candidate; } String configPatternKey = configPatternMatcher.matches(configPatterns.keySet(), itemName); if (configPatternKey != null) { return configPatterns.get(configPatternKey); } if (!"default".equals(itemName) && !itemName.startsWith("hz:")) { LOGGER.finest("No configuration found for " + itemName + ", using default config!"); } return null; } // TODO: This mechanism isn't used anymore to determine if 2 HZ configurations are compatible. // See {@link ConfigCheck} for more information. 
/** * @param config * @return true if config is compatible with this one, false if config belongs to another group * @throws RuntimeException if map, queue, topic configs are incompatible */ public boolean isCompatible(final Config config) { if (config == null) { throw new IllegalArgumentException("Expected not null config"); } if (!this.groupConfig.getName().equals(config.getGroupConfig().getName())) { return false; } if (!this.groupConfig.getPassword().equals(config.getGroupConfig().getPassword())) { throw new HazelcastException("Incompatible group password"); } checkMapConfigCompatible(config); return true; } private void checkMapConfigCompatible(final Config config) { Set<String> mapConfigNames = new HashSet<String>(mapConfigs.keySet()); mapConfigNames.addAll(config.mapConfigs.keySet()); for (final String name : mapConfigNames) { final MapConfig thisMapConfig = lookupByPattern(mapConfigs, name); final MapConfig thatMapConfig = lookupByPattern(config.mapConfigs, name); if (thisMapConfig != null && thatMapConfig != null && !thisMapConfig.isCompatible(thatMapConfig)) { throw new HazelcastException( format( "Incompatible map config this:\n{0}\nanother:\n{1}", thisMapConfig, thatMapConfig)); } } } @Override public String toString() { return "Config{" + "groupConfig=" + groupConfig + ", properties=" + properties + ", networkConfig=" + networkConfig + ", mapConfigs=" + mapConfigs + ", topicConfigs=" + topicConfigs + ", reliableTopicConfigs=" + reliableTopicConfigs + ", queueConfigs=" + queueConfigs + ", multiMapConfigs=" + multiMapConfigs + ", executorConfigs=" + executorConfigs + ", semaphoreConfigs=" + semaphoreConfigs + ", ringbufferConfigs=" + ringbufferConfigs + ", wanReplicationConfigs=" + wanReplicationConfigs + ", listenerConfigs=" + listenerConfigs + ", partitionGroupConfig=" + partitionGroupConfig + ", managementCenterConfig=" + managementCenterConfig + ", securityConfig=" + securityConfig + ", liteMember=" + liteMember + '}'; } }
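// Illustrative sketch only: programmatic use of the Config class above, showing how wildcard map
// configs are resolved through the configured pattern matcher and how unmatched names fall back to
// the "default" entry. The names "events.*" and "events.orders" are assumptions for this example.
import com.hazelcast.config.Config;
import com.hazelcast.config.MapConfig;

public class ConfigExample {

    public static void main(String[] args) {
        Config config = new Config("example-instance");
        config.setProperty("hazelcast.logging.type", "jdk");

        // A wildcard entry acts as a template for every map whose name matches the pattern.
        MapConfig eventsMaps = new MapConfig("events.*");
        eventsMaps.setBackupCount(2);
        config.addMapConfig(eventsMaps);

        // findMapConfig resolves "events.orders" against "events.*" and returns a read-only view;
        // a name that matches no explicit entry falls back to the "default" map config.
        System.out.println(config.findMapConfig("events.orders").getBackupCount()); // 2
        System.out.println(config.findMapConfig("other").getBackupCount());         // default (1)
    }
}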
public class ProducerConsumerTest { private static final ILogger LOGGER = Logger.getLogger(ProducerConsumerTest.class); // properties public String basename = ProducerConsumerTest.class.getSimpleName(); public int producerCount = 4; public int consumerCount = 4; public int maxIntervalMillis = 1000; private IAtomicLong produced; private IQueue<Work> workQueue; private IAtomicLong consumed; private TestContext testContext; @Setup public void setup(TestContext testContext) throws Exception { this.testContext = testContext; HazelcastInstance targetInstance = testContext.getTargetInstance(); produced = targetInstance.getAtomicLong(basename + "-" + testContext.getTestId() + ":Produced"); consumed = targetInstance.getAtomicLong(basename + "-" + testContext.getTestId() + ":Consumed"); workQueue = targetInstance.getQueue(basename + "-" + testContext.getTestId() + ":WorkQueue"); } @Teardown public void teardown() throws Exception { produced.destroy(); workQueue.destroy(); consumed.destroy(); } @Run public void run() { ThreadSpawner spawner = new ThreadSpawner(testContext.getTestId()); for (int k = 0; k < producerCount; k++) { spawner.spawn("ProducerThread", new Producer(k)); } for (int k = 0; k < consumerCount; k++) { spawner.spawn("ConsumerThread", new Consumer(k)); } spawner.awaitCompletion(); } @Verify public void verify() { long expected = workQueue.size() + consumed.get(); long actual = produced.get(); assertEquals(expected, actual); } private class Producer implements Runnable { final Random rand = new Random(System.currentTimeMillis()); final int id; public Producer(int id) { this.id = id; } @Override public void run() { long iteration = 0; while (!testContext.isStopped()) { try { Thread.sleep(rand.nextInt(maxIntervalMillis) * consumerCount); produced.incrementAndGet(); workQueue.offer(new Work()); iteration++; if (iteration % 10 == 0) { LOGGER.info( String.format( "%s prod-id: %d, iteration: %d, produced: %d, workQueue: %d, consumed: %d", Thread.currentThread().getName(), id, iteration, produced.get(), workQueue.size(), consumed.get())); } } catch (Exception e) { throw new TestException(e); } } } } private class Consumer implements Runnable { Random rand = new Random(System.currentTimeMillis()); int id; public Consumer(int id) { this.id = id; } @Override public void run() { long iteration = 0; while (!testContext.isStopped()) { try { workQueue.take(); consumed.incrementAndGet(); Thread.sleep(rand.nextInt(maxIntervalMillis) * producerCount); iteration++; if (iteration % 20 == 0) { LOGGER.info( String.format( "%s cons-id: %d, iteration: %d, produced: %d, workQueue: %d, consumed: %d", Thread.currentThread().getName(), id, iteration, produced.get(), workQueue.size(), consumed.get())); } } catch (Exception e) { throw new TestException(e); } } } } static class Work implements Serializable {} public static void main(String[] args) throws Exception { ProducerConsumerTest test = new ProducerConsumerTest(); new TestRunner<ProducerConsumerTest>(test).run(); } }
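// Hedged local illustration (not part of the test above) of the invariant checked in verify():
// once production and consumption stop, produced == consumed + items still sitting in the work
// queue. Plain JDK structures stand in for the distributed IQueue and IAtomicLong.
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicLong;

public class ProducerConsumerInvariantSketch {
    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<Object> workQueue = new ArrayBlockingQueue<Object>(100);
        AtomicLong produced = new AtomicLong();
        AtomicLong consumed = new AtomicLong();

        // produce 50 items
        for (int i = 0; i < 50; i++) {
            produced.incrementAndGet();
            workQueue.put(new Object());
        }
        // consume 30 of them
        for (int i = 0; i < 30; i++) {
            workQueue.take();
            consumed.incrementAndGet();
        }

        // same check as verify(): expected == actual
        long expected = workQueue.size() + consumed.get();
        long actual = produced.get();
        System.out.println(expected == actual); // prints true
    }
}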
public class ClientConnectionManagerImpl implements ClientConnectionManager { private static final int RETRY_COUNT = 20; private static final ILogger LOGGER = Logger.getLogger(ClientConnectionManagerImpl.class); private static final IOSelectorOutOfMemoryHandler OUT_OF_MEMORY_HANDLER = new IOSelectorOutOfMemoryHandler() { @Override public void handle(OutOfMemoryError error) { LOGGER.severe(error); } }; private final int connectionTimeout; private final int heartBeatInterval; private final int heartBeatTimeout; private final ConcurrentMap<Address, Object> connectionLockMap = new ConcurrentHashMap<Address, Object>(); private final AtomicInteger connectionIdGen = new AtomicInteger(); private final HazelcastClient client; private final Router router; private SocketInterceptor socketInterceptor; private final SocketOptions socketOptions; private final IOSelector inSelector; private final IOSelector outSelector; private final boolean smartRouting; private final OwnerConnectionFuture ownerConnectionFuture = new OwnerConnectionFuture(); private final Credentials credentials; private volatile ClientPrincipal principal; private final AtomicInteger callIdIncrementer = new AtomicInteger(); private final SocketChannelWrapperFactory socketChannelWrapperFactory; private final ClientExecutionServiceImpl executionService; private ClientInvocationServiceImpl invocationService; private final AddressTranslator addressTranslator; private final ConcurrentMap<Address, ClientConnection> connections = new ConcurrentHashMap<Address, ClientConnection>(); private volatile boolean live; public ClientConnectionManagerImpl( HazelcastClient client, LoadBalancer loadBalancer, AddressTranslator addressTranslator) { this.client = client; this.addressTranslator = addressTranslator; final ClientConfig config = client.getClientConfig(); final ClientNetworkConfig networkConfig = config.getNetworkConfig(); final int connTimeout = networkConfig.getConnectionTimeout(); connectionTimeout = connTimeout == 0 ? Integer.MAX_VALUE : connTimeout; final ClientProperties clientProperties = client.getClientProperties(); int timeout = clientProperties.getHeartbeatTimeout().getInteger(); this.heartBeatTimeout = timeout > 0 ? timeout : Integer.parseInt(PROP_HEARTBEAT_TIMEOUT_DEFAULT); int interval = clientProperties.getHeartbeatInterval().getInteger(); heartBeatInterval = interval > 0 ? 
interval : Integer.parseInt(PROP_HEARTBEAT_INTERVAL_DEFAULT); smartRouting = networkConfig.isSmartRouting(); executionService = (ClientExecutionServiceImpl) client.getClientExecutionService(); credentials = initCredentials(config); router = new Router(loadBalancer); inSelector = new InSelectorImpl( client.getThreadGroup(), "InSelector", Logger.getLogger(InSelectorImpl.class), OUT_OF_MEMORY_HANDLER); outSelector = new OutSelectorImpl( client.getThreadGroup(), "OutSelector", Logger.getLogger(OutSelectorImpl.class), OUT_OF_MEMORY_HANDLER); socketInterceptor = initSocketInterceptor(networkConfig.getSocketInterceptorConfig()); socketOptions = networkConfig.getSocketOptions(); socketChannelWrapperFactory = initSocketChannel(networkConfig); } private SocketChannelWrapperFactory initSocketChannel(ClientNetworkConfig networkConfig) { // ioService.getSSLConfig(); TODO SSLConfig sslConfig = networkConfig.getSSLConfig(); if (sslConfig != null && sslConfig.isEnabled()) { LOGGER.info("SSL is enabled"); return new SSLSocketChannelWrapperFactory(sslConfig); } else { return new DefaultSocketChannelWrapperFactory(); } } private Credentials initCredentials(ClientConfig config) { final GroupConfig groupConfig = config.getGroupConfig(); final ClientSecurityConfig securityConfig = config.getSecurityConfig(); Credentials c = securityConfig.getCredentials(); if (c == null) { final String credentialsClassname = securityConfig.getCredentialsClassname(); // todo: Should be moved to a reflection utility. if (credentialsClassname != null) { try { c = ClassLoaderUtil.newInstance(config.getClassLoader(), credentialsClassname); } catch (Exception e) { throw ExceptionUtil.rethrow(e); } } } if (c == null) { c = new UsernamePasswordCredentials(groupConfig.getName(), groupConfig.getPassword()); } return c; } private SocketInterceptor initSocketInterceptor(SocketInterceptorConfig sic) { SocketInterceptor implementation = null; if (sic != null && sic.isEnabled()) { implementation = (SocketInterceptor) sic.getImplementation(); if (implementation == null && sic.getClassName() != null) { try { implementation = (SocketInterceptor) Class.forName(sic.getClassName()).newInstance(); } catch (Throwable e) { LOGGER.severe("SocketInterceptor class cannot be instantiated: " + sic.getClassName(), e); } } } if (implementation != null) { implementation.init(sic.getProperties()); } return implementation; } @Override public boolean isLive() { return live; } private SerializationService getSerializationService() { return client.getSerializationService(); } @Override public synchronized void start() { if (live) { return; } live = true; inSelector.start(); outSelector.start(); invocationService = (ClientInvocationServiceImpl) client.getInvocationService(); final HeartBeat heartBeat = new HeartBeat(); executionService.scheduleWithFixedDelay( heartBeat, heartBeatInterval, heartBeatInterval, TimeUnit.MILLISECONDS); } @Override public synchronized void shutdown() { if (!live) { return; } live = false; for (ClientConnection connection : connections.values()) { connection.close(); } inSelector.shutdown(); outSelector.shutdown(); connectionLockMap.clear(); } @Override public void onCloseOwnerConnection() { // mark the owner connection as closed so that operations requiring the owner connection can // wait for a new one.
ownerConnectionFuture.markAsClosed(); } @Override public ClientConnection ownerConnection(Address address) throws Exception { final Address translatedAddress = addressTranslator.translate(address); if (translatedAddress == null) { throw new RetryableIOException(address + " cannot be translated!"); } return ownerConnectionFuture.createNew(translatedAddress); } @Override public ClientConnection tryToConnect(Address target) throws Exception { Authenticator authenticator = new ClusterAuthenticator(); int count = 0; IOException lastError = null; while (count < RETRY_COUNT) { try { if (target == null || !isMember(target)) { Address address = getAddressFromLoadBalancer(); return getOrConnect(address, authenticator); } else { return getOrConnect(target, authenticator); } } catch (IOException e) { lastError = e; } target = null; count++; } throw lastError; } private Address getAddressFromLoadBalancer() { Address address = router.next(); if (address == null) { Set<Member> members = client.getCluster().getMembers(); String msg; if (members.isEmpty()) { msg = "No address was returned by the LoadBalancer since there are no members in the cluster"; } else { msg = "No address was returned by the LoadBalancer, " + "but the cluster contains the following members: " + members; } throw new IllegalStateException(msg); } return address; } @Override public String getUuid() { final ClientPrincipal cp = principal; return cp != null ? cp.getUuid() : null; } private boolean isMember(Address target) { final ClientClusterService clientClusterService = client.getClientClusterService(); return clientClusterService.getMember(target) != null; } private ClientConnection getOrConnect(Address target, Authenticator authenticator) throws Exception { if (!smartRouting) { target = ownerConnectionFuture.getOrWaitForCreation().getEndPoint(); } Address address = addressTranslator.translate(target); if (address == null) { throw new IOException("Address is required!"); } ClientConnection clientConnection = connections.get(address); if (clientConnection == null) { final Object lock = getLock(address); synchronized (lock) { clientConnection = connections.get(address); if (clientConnection == null) { final ConnectionProcessor connectionProcessor = new ConnectionProcessor(address, authenticator, false); final ICompletableFuture<ClientConnection> future = executionService.submitInternal(connectionProcessor); try { clientConnection = future.get(connectionTimeout, TimeUnit.MILLISECONDS); } catch (Exception e) { future.cancel(true); throw new RetryableIOException(e); } ClientConnection current = connections.putIfAbsent(address, clientConnection); if (current != null) { clientConnection.close(); clientConnection = current; } } } } return clientConnection; } private final class ConnectionProcessor implements Callable<ClientConnection> { final Address address; final Authenticator authenticator; final boolean isBlock; private ConnectionProcessor( final Address address, final Authenticator authenticator, final boolean isBlock) { this.address = address; this.authenticator = authenticator; this.isBlock = isBlock; } @Override public ClientConnection call() throws Exception { if (!live) { throw new HazelcastException("ConnectionManager is not active!"); } SocketChannel socketChannel = null; try { socketChannel = SocketChannel.open(); Socket socket = socketChannel.socket(); socket.setKeepAlive(socketOptions.isKeepAlive()); socket.setTcpNoDelay(socketOptions.isTcpNoDelay()); socket.setReuseAddress(socketOptions.isReuseAddress()); if
(socketOptions.getLingerSeconds() > 0) { socket.setSoLinger(true, socketOptions.getLingerSeconds()); } int bufferSize = socketOptions.getBufferSize() * KILO_BYTE; if (bufferSize < 0) { bufferSize = DEFAULT_BUFFER_SIZE_BYTE; } socket.setSendBufferSize(bufferSize); socket.setReceiveBufferSize(bufferSize); socketChannel.socket().connect(address.getInetSocketAddress(), connectionTimeout); SocketChannelWrapper socketChannelWrapper = socketChannelWrapperFactory.wrapSocketChannel(socketChannel, true); final ClientConnection clientConnection = new ClientConnection( ClientConnectionManagerImpl.this, inSelector, outSelector, connectionIdGen.incrementAndGet(), socketChannelWrapper, executionService, invocationService, client.getSerializationService()); socketChannel.configureBlocking(true); if (socketInterceptor != null) { socketInterceptor.onConnect(socket); } authenticator.auth(clientConnection); socketChannel.configureBlocking(isBlock); socket.setSoTimeout(0); if (!isBlock) { clientConnection.getReadHandler().register(); } return clientConnection; } catch (Exception e) { if (socketChannel != null) { socketChannel.close(); } throw ExceptionUtil.rethrow(e); } } } @Override public void onConnectionClose(ClientConnection clientConnection) { Address endpoint = clientConnection.getRemoteEndpoint(); if (endpoint != null) { connections.remove(clientConnection.getRemoteEndpoint()); ownerConnectionFuture.closeIfAddressMatches(endpoint); } } @Override public boolean removeEventHandler(Integer callId) { if (callId != null) { for (ClientConnection clientConnection : connections.values()) { if (clientConnection.deRegisterEventHandler(callId) != null) { return true; } } } return false; } @Override public void handlePacket(Packet packet) { final ClientConnection conn = (ClientConnection) packet.getConn(); conn.incrementPacketCount(); if (packet.isHeaderSet(Packet.HEADER_EVENT)) { final ClientListenerServiceImpl listenerService = (ClientListenerServiceImpl) client.getListenerService(); listenerService.handleEventPacket(packet); } else { invocationService.handlePacket(packet); } } @Override public int newCallId() { return callIdIncrementer.incrementAndGet(); } public class ManagerAuthenticator implements Authenticator { @Override public void auth(ClientConnection connection) throws AuthenticationException, IOException { final Object response = authenticate(connection, credentials, principal, true); principal = (ClientPrincipal) response; } } private class ClusterAuthenticator implements Authenticator { @Override public void auth(ClientConnection connection) throws AuthenticationException, IOException { authenticate(connection, credentials, principal, false); } } private Object authenticate( ClientConnection connection, Credentials credentials, ClientPrincipal principal, boolean firstConnection) throws IOException { final SerializationService ss = getSerializationService(); AuthenticationRequest auth = new AuthenticationRequest(credentials, principal); connection.init(); auth.setOwnerConnection(firstConnection); // contains remoteAddress and principal SerializableCollection collectionWrapper; try { collectionWrapper = (SerializableCollection) sendAndReceive(auth, connection); } catch (Exception e) { throw new RetryableIOException(e); } final Iterator<Data> iter = collectionWrapper.iterator(); if (iter.hasNext()) { final Data addressData = iter.next(); final Address address = ss.toObject(addressData); connection.setRemoteEndpoint(address); if (iter.hasNext()) { final Data principalData = iter.next(); return 
ss.toObject(principalData); } } throw new AuthenticationException(); } @Override public Object sendAndReceive(ClientRequest request, ClientConnection connection) throws Exception { final SerializationService ss = client.getSerializationService(); connection.write(ss.toData(request)); final Data data = connection.read(); ClientResponse clientResponse = ss.toObject(data); Object response = ss.toObject(clientResponse.getResponse()); if (response instanceof Throwable) { Throwable t = (Throwable) response; ExceptionUtil.fixRemoteStackTrace(t, Thread.currentThread().getStackTrace()); throw new Exception(t); } return response; } interface SocketChannelWrapperFactory { SocketChannelWrapper wrapSocketChannel(SocketChannel socketChannel, boolean client) throws Exception; } static class DefaultSocketChannelWrapperFactory implements SocketChannelWrapperFactory { @Override public SocketChannelWrapper wrapSocketChannel(SocketChannel socketChannel, boolean client) throws Exception { return new DefaultSocketChannelWrapper(socketChannel); } } static class SSLSocketChannelWrapperFactory implements SocketChannelWrapperFactory { final SSLContextFactory sslContextFactory; SSLSocketChannelWrapperFactory(SSLConfig sslConfig) { // if (CipherHelper.isSymmetricEncryptionEnabled(ioService)) { // throw new RuntimeException("SSL and SymmetricEncryption cannot be both // enabled!"); // } SSLContextFactory sslContextFactoryObject = (SSLContextFactory) sslConfig.getFactoryImplementation(); try { String factoryClassName = sslConfig.getFactoryClassName(); if (sslContextFactoryObject == null && factoryClassName != null) { sslContextFactoryObject = (SSLContextFactory) Class.forName(factoryClassName).newInstance(); } if (sslContextFactoryObject == null) { sslContextFactoryObject = new BasicSSLContextFactory(); } sslContextFactoryObject.init(sslConfig.getProperties()); } catch (Exception e) { throw new RuntimeException(e); } sslContextFactory = sslContextFactoryObject; } @Override public SocketChannelWrapper wrapSocketChannel(SocketChannel socketChannel, boolean client) throws Exception { return new SSLSocketChannelWrapper(sslContextFactory.getSSLContext(), socketChannel, client); } } private Object getLock(Address address) { Object lock = connectionLockMap.get(address); if (lock == null) { lock = new Object(); Object current = connectionLockMap.putIfAbsent(address, lock); if (current != null) { lock = current; } } return lock; } class HeartBeat implements Runnable { public void run() { if (!live) { return; } final long now = Clock.currentTimeMillis(); for (ClientConnection connection : connections.values()) { if (now - connection.lastReadTime() > heartBeatTimeout) { connection.heartBeatingFailed(); } if (now - connection.lastReadTime() > heartBeatInterval) { final ClientPingRequest request = new ClientPingRequest(); invocationService.send(request, connection); } else { connection.heartBeatingSucceed(); } } } } public void removeEndpoint(Address address) { final ClientConnection clientConnection = connections.get(address); if (clientConnection != null) { clientConnection.close(); } } @Override public void onDetectingUnresponsiveConnection(ClientConnection connection) { if (smartRouting) { // closing the owner connection if unresponsive so that it can be switched to a healthy one. ownerConnectionFuture.closeIfAddressMatches(connection.getEndPoint()); // we do not close connection itself since we will continue to send heartbeat ping to this // connection. 
// IOUtil.closeResource(connection); return; } // close both owner and operation connection ownerConnectionFuture.close(); IOUtil.closeResource(connection); } private class OwnerConnectionFuture { private final Object ownerConnectionLock = new Object(); private volatile ClientConnection ownerConnection; private ClientConnection getOrWaitForCreation() throws IOException { ClientNetworkConfig networkConfig = client.getClientConfig().getNetworkConfig(); long connectionAttemptLimit = networkConfig.getConnectionAttemptLimit(); long connectionAttemptPeriod = networkConfig.getConnectionAttemptPeriod(); long waitTime = connectionAttemptLimit * connectionAttemptPeriod * 2; if (waitTime < 0) { waitTime = Long.MAX_VALUE; } final ClientConnection currentOwnerConnection = ownerConnection; if (currentOwnerConnection != null) { return currentOwnerConnection; } long remainingWait = waitTime; synchronized (ownerConnectionLock) { long waitStart = System.currentTimeMillis(); while (ownerConnection == null && remainingWait > 0) { try { ownerConnectionLock.wait(remainingWait); remainingWait = waitTime - (System.currentTimeMillis() - waitStart); } catch (InterruptedException e) { throw new IOException(e); } } if (ownerConnection == null) { LOGGER.warning("Wait for owner connection timed out"); throw new IOException("Wait for owner connection timed out"); } return ownerConnection; } } private ClientConnection createNew(Address address) throws RetryableIOException { final ManagerAuthenticator authenticator = new ManagerAuthenticator(); final ConnectionProcessor connectionProcessor = new ConnectionProcessor(address, authenticator, true); ICompletableFuture<ClientConnection> future = executionService.submitInternal(connectionProcessor); try { ClientConnection conn = future.get(connectionTimeout, TimeUnit.MILLISECONDS); synchronized (ownerConnectionLock) { ownerConnection = conn; ownerConnectionLock.notifyAll(); } return conn; } catch (Exception e) { future.cancel(true); throw new RetryableIOException(e); } } private void markAsClosed() { ownerConnection = null; } private void closeIfAddressMatches(Address address) { final ClientConnection currentOwnerConnection = ownerConnection; if (currentOwnerConnection == null || !currentOwnerConnection.live()) { return; } if (address.equals(currentOwnerConnection.getRemoteEndpoint())) { close(); } } private void close() { final ClientConnection currentOwnerConnection = ownerConnection; if (currentOwnerConnection == null) { return; } IOUtil.closeResource(currentOwnerConnection); markAsClosed(); } } }
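// Hedged illustration (not in the original sources) of the per-address connection caching pattern
// used by getOrConnect() above: check the map, re-check under a per-key lock, and fall back to
// putIfAbsent() so a racing creator wins and the loser closes its instance. The Resource type and
// create() factory below are hypothetical stand-ins for ClientConnection and ConnectionProcessor.
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

class PerKeyCacheSketch<K> {
    interface Resource { void close(); }

    private final ConcurrentMap<K, Resource> cache = new ConcurrentHashMap<K, Resource>();
    private final ConcurrentMap<K, Object> locks = new ConcurrentHashMap<K, Object>();

    Resource getOrCreate(K key) {
        Resource existing = cache.get(key);
        if (existing != null) {
            return existing; // fast path: already created
        }
        Object lock = lockFor(key);
        synchronized (lock) {
            existing = cache.get(key); // re-check under the per-key lock
            if (existing != null) {
                return existing;
            }
            Resource created = create(key);
            Resource race = cache.putIfAbsent(key, created);
            if (race != null) { // another thread won the race; discard ours
                created.close();
                return race;
            }
            return created;
        }
    }

    private Object lockFor(K key) {
        Object lock = locks.get(key);
        if (lock == null) {
            Object fresh = new Object();
            Object current = locks.putIfAbsent(key, fresh);
            lock = current != null ? current : fresh;
        }
        return lock;
    }

    Resource create(K key) { // hypothetical factory
        return new Resource() {
            public void close() {
            }
        };
    }
}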
/** @author mdogan 5/15/13 */ public final class ClientClusterServiceImpl implements ClientClusterService { private static final ILogger logger = Logger.getLogger(ClientClusterService.class); private static int RETRY_COUNT = 20; private static int RETRY_WAIT_TIME = 500; private final HazelcastClient client; private final ClusterListenerThread clusterThread; private final AtomicReference<Map<Address, MemberImpl>> membersRef = new AtomicReference<Map<Address, MemberImpl>>(); private final ConcurrentMap<String, MembershipListener> listeners = new ConcurrentHashMap<String, MembershipListener>(); private final boolean redoOperation; private final Credentials credentials; private volatile ClientPrincipal principal; private volatile boolean active = false; public ClientClusterServiceImpl(HazelcastClient client) { this.client = client; clusterThread = new ClusterListenerThread(client.getThreadGroup(), client.getName() + ".cluster-listener"); final ClientConfig clientConfig = getClientConfig(); redoOperation = clientConfig.isRedoOperation(); credentials = clientConfig.getCredentials(); final List<ListenerConfig> listenerConfigs = client.getClientConfig().getListenerConfigs(); if (listenerConfigs != null && !listenerConfigs.isEmpty()) { for (ListenerConfig listenerConfig : listenerConfigs) { EventListener listener = listenerConfig.getImplementation(); if (listener == null) { try { listener = ClassLoaderUtil.newInstance( clientConfig.getClassLoader(), listenerConfig.getClassName()); } catch (Exception e) { logger.severe(e); } } if (listener instanceof MembershipListener) { addMembershipListener((MembershipListener) listener); } } } } public MemberImpl getMember(Address address) { final Map<Address, MemberImpl> members = membersRef.get(); return members != null ? members.get(address) : null; } public MemberImpl getMember(String uuid) { final Collection<MemberImpl> memberList = getMemberList(); for (MemberImpl member : memberList) { if (uuid.equals(member.getUuid())) { return member; } } return null; } public Collection<MemberImpl> getMemberList() { final Map<Address, MemberImpl> members = membersRef.get(); return members != null ? members.values() : Collections.<MemberImpl>emptySet(); } public Address getMasterAddress() { final Collection<MemberImpl> memberList = getMemberList(); return !memberList.isEmpty() ? memberList.iterator().next().getAddress() : null; } public int getSize() { return getMemberList().size(); } public long getClusterTime() { return Clock.currentTimeMillis(); } <T> T sendAndReceive(Object obj) throws IOException { return _sendAndReceive(randomConnectionFactory, obj); } <T> T sendAndReceive(final Address address, Object obj) throws IOException { return _sendAndReceive(new TargetConnectionFactory(address), obj); } public Client getLocalClient() { ClientPrincipal cp = principal; Connection conn = clusterThread.conn; return new ClientImpl( cp != null ? cp.getUuid() : null, conn != null ? 
conn.getLocalSocketAddress() : null); } private interface ConnectionFactory { Connection create() throws IOException; } private final ConnectionFactory randomConnectionFactory = new ConnectionFactory() { public Connection create() throws IOException { return getRandomConnection(); } }; private class TargetConnectionFactory implements ConnectionFactory { final Address target; private TargetConnectionFactory(Address target) { this.target = target; } public Connection create() throws IOException { return getConnection(target); } } private <T> T _sendAndReceive(ConnectionFactory connectionFactory, Object obj) throws IOException { while (active) { Connection conn = null; boolean release = true; try { conn = connectionFactory.create(); final SerializationService serializationService = getSerializationService(); final Data request = serializationService.toData(obj); conn.write(request); final Data response = conn.read(); final Object result = serializationService.toObject(response); return ErrorHandler.returnResultOrThrowException(result); } catch (Exception e) { if (e instanceof IOException) { if (logger.isFinestEnabled()) { logger.finest("Error on connection... conn: " + conn + ", error: " + e); } IOUtil.closeResource(conn); release = false; } if (ErrorHandler.isRetryable(e)) { if (redoOperation || obj instanceof RetryableRequest) { if (logger.isFinestEnabled()) { logger.finest("Retrying " + obj + ", last-conn: " + conn + ", last-error: " + e); } beforeRetry(); continue; } } if (e instanceof IOException && !active) { continue; } throw ExceptionUtil.rethrow(e, IOException.class); } finally { if (release && conn != null) { conn.release(); } } } throw new HazelcastInstanceNotActiveException(); } public <T> T sendAndReceiveFixedConnection(Connection conn, Object obj) throws IOException { final SerializationService serializationService = getSerializationService(); final Data request = serializationService.toData(obj); conn.write(request); final Data response = conn.read(); final Object result = serializationService.toObject(response); return ErrorHandler.returnResultOrThrowException(result); } private SerializationService getSerializationService() { return client.getSerializationService(); } private ClientConnectionManager getConnectionManager() { return client.getConnectionManager(); } private Connection getRandomConnection() throws IOException { return getConnection(null); } private Connection getConnection(Address address) throws IOException { if (!client.getLifecycleService().isRunning()) { throw new HazelcastInstanceNotActiveException(); } Connection connection = null; int retryCount = RETRY_COUNT; while (connection == null && retryCount > 0) { if (address != null) { connection = client.getConnectionManager().getConnection(address); address = null; } else { connection = client.getConnectionManager().getRandomConnection(); } if (connection == null) { retryCount--; beforeRetry(); } } if (connection == null) { throw new IOException("Unable to connect to " + address); } return connection; } private void beforeRetry() { try { Thread.sleep(RETRY_WAIT_TIME); ((ClientPartitionServiceImpl) client.getClientPartitionService()).refreshPartitions(); } catch (InterruptedException ignored) { } } void sendAndHandle(final Address address, Object obj, ResponseHandler handler) throws IOException { _sendAndHandle(new TargetConnectionFactory(address), obj, handler); } void sendAndHandle(Object obj, ResponseHandler handler) throws IOException { _sendAndHandle(randomConnectionFactory, obj, handler); } private void 
_sendAndHandle( ConnectionFactory connectionFactory, Object obj, ResponseHandler handler) throws IOException { ResponseStream stream = null; while (stream == null) { if (!active) { throw new HazelcastInstanceNotActiveException(); } Connection conn = null; try { conn = connectionFactory.create(); final SerializationService serializationService = getSerializationService(); final Data request = serializationService.toData(obj); conn.write(request); stream = new ResponseStreamImpl(serializationService, conn); } catch (Exception e) { if (e instanceof IOException) { if (logger.isFinestEnabled()) { logger.finest("Error on connection... conn: " + conn + ", error: " + e); } } if (conn != null) { IOUtil.closeResource(conn); } if (ErrorHandler.isRetryable(e)) { if (redoOperation || obj instanceof RetryableRequest) { if (logger.isFinestEnabled()) { logger.finest("Retrying " + obj + ", last-conn: " + conn + ", last-error: " + e); } beforeRetry(); continue; } } if (e instanceof IOException && !active) { continue; } throw ExceptionUtil.rethrow(e, IOException.class); } } try { handler.handle(stream); } catch (Exception e) { throw ExceptionUtil.rethrow(e, IOException.class); } finally { stream.end(); } } public Authenticator getAuthenticator() { return new ClusterAuthenticator(); } public String addMembershipListener(MembershipListener listener) { final String id = UuidUtil.buildRandomUuidString(); listeners.put(id, listener); return id; } private void initMembershipListener() { for (MembershipListener membershipListener : listeners.values()) { if (membershipListener instanceof InitialMembershipListener) { // TODO: needs sync with membership events... final Cluster cluster = client.getCluster(); ((InitialMembershipListener) membershipListener) .init(new InitialMembershipEvent(cluster, cluster.getMembers())); } } } public boolean removeMembershipListener(String registrationId) { return listeners.remove(registrationId) != null; } public void start() { clusterThread.start(); // TODO: replace with a better wait-notify while (membersRef.get() == null && clusterThread.isAlive()) { try { Thread.sleep(100); } catch (InterruptedException e) { throw new HazelcastException(e); } } initMembershipListener(); active = true; // started } public void stop() { active = false; clusterThread.shutdown(); } private class InitialConnectionCall implements Callable<Connection> { public Connection call() throws Exception { return connectToOne(getConfigAddresses()); } } private class ClusterListenerThread extends Thread { private ClusterListenerThread(ThreadGroup group, String name) { super(group, name); } private volatile Connection conn; private final List<MemberImpl> members = new LinkedList<MemberImpl>(); public void run() { while (!Thread.currentThread().isInterrupted()) { try { if (conn == null) { try { conn = pickConnection(); } catch (Exception e) { logger.severe("Error while connecting to cluster!", e); client.getLifecycleService().shutdown(); return; } } loadInitialMemberList(); listenMembershipEvents(); } catch (Exception e) { if (client.getLifecycleService().isRunning()) { if (logger.isFinestEnabled()) { logger.warning("Error while listening cluster events! -> " + conn, e); } else { logger.warning( "Error while listening cluster events! 
-> " + conn + ", Error: " + e.toString()); } } IOUtil.closeResource(conn); conn = null; fireConnectionEvent(true); } try { Thread.sleep(1000); } catch (InterruptedException e) { break; } } } private Connection pickConnection() throws Exception { final Collection<InetSocketAddress> addresses = new HashSet<InetSocketAddress>(); if (!members.isEmpty()) { addresses.addAll(getClusterAddresses()); } addresses.addAll(getConfigAddresses()); return connectToOne(addresses); } private void loadInitialMemberList() throws IOException { final SerializationService serializationService = getSerializationService(); final Data request = serializationService.toData(new AddMembershipListenerRequest()); conn.write(request); final Data response = conn.read(); SerializableCollection coll = ErrorHandler.returnResultOrThrowException(serializationService.toObject(response)); Map<String, MemberImpl> prevMembers = Collections.emptyMap(); if (!members.isEmpty()) { prevMembers = new HashMap<String, MemberImpl>(members.size()); for (MemberImpl member : members) { prevMembers.put(member.getUuid(), member); } members.clear(); } for (Data d : coll.getCollection()) { members.add((MemberImpl) serializationService.toObject(d)); } updateMembersRef(); logger.info(membersString()); final List<MembershipEvent> events = new LinkedList<MembershipEvent>(); final Set<Member> eventMembers = Collections.unmodifiableSet(new LinkedHashSet<Member>(members)); for (MemberImpl member : members) { final MemberImpl former = prevMembers.remove(member.getUuid()); if (former == null) { events.add( new MembershipEvent( client.getCluster(), member, MembershipEvent.MEMBER_ADDED, eventMembers)); } } for (MemberImpl member : prevMembers.values()) { events.add( new MembershipEvent( client.getCluster(), member, MembershipEvent.MEMBER_REMOVED, eventMembers)); } for (MembershipEvent event : events) { fireMembershipEvent(event); } } private void listenMembershipEvents() throws IOException { final SerializationService serializationService = getSerializationService(); while (!Thread.currentThread().isInterrupted()) { final Data eventData = conn.read(); final ClientMembershipEvent event = (ClientMembershipEvent) serializationService.toObject(eventData); final MemberImpl member = (MemberImpl) event.getMember(); if (event.getEventType() == MembershipEvent.MEMBER_ADDED) { members.add(member); } else { members.remove(member); getConnectionManager().removeConnectionPool(member.getAddress()); } updateMembersRef(); logger.info(membersString()); fireMembershipEvent( new MembershipEvent( client.getCluster(), member, event.getEventType(), Collections.unmodifiableSet(new LinkedHashSet<Member>(members)))); } } private void fireMembershipEvent(final MembershipEvent event) { client .getClientExecutionService() .execute( new Runnable() { public void run() { for (MembershipListener listener : listeners.values()) { if (event.getEventType() == MembershipEvent.MEMBER_ADDED) { listener.memberAdded(event); } else { listener.memberRemoved(event); } } } }); } private void updateMembersRef() { final Map<Address, MemberImpl> map = new LinkedHashMap<Address, MemberImpl>(members.size()); for (MemberImpl member : members) { map.put(member.getAddress(), member); } membersRef.set(Collections.unmodifiableMap(map)); } private Collection<InetSocketAddress> getClusterAddresses() { final List<InetSocketAddress> socketAddresses = new LinkedList<InetSocketAddress>(); for (MemberImpl member : members) { socketAddresses.add(member.getInetSocketAddress()); } Collections.shuffle(socketAddresses); 
return socketAddresses; } void setInitialConn(Connection conn) { this.conn = conn; } void shutdown() { interrupt(); final Connection c = conn; if (c != null) { try { c.close(); } catch (IOException e) { logger.warning("Error while closing connection!", e); } } } } private Connection connectToOne(final Collection<InetSocketAddress> socketAddresses) throws Exception { final int connectionAttemptLimit = getClientConfig().getConnectionAttemptLimit(); final ManagerAuthenticator authenticator = new ManagerAuthenticator(); int attempt = 0; Throwable lastError = null; while (true) { final long nextTry = Clock.currentTimeMillis() + getClientConfig().getConnectionAttemptPeriod(); for (InetSocketAddress isa : socketAddresses) { Address address = new Address(isa); try { final Connection connection = getConnectionManager().firstConnection(address, authenticator); active = true; fireConnectionEvent(false); return connection; } catch (IOException e) { active = false; lastError = e; logger.finest("IO error during initial connection...", e); } catch (AuthenticationException e) { active = false; lastError = e; logger.warning("Authentication error on " + address, e); } } if (attempt++ >= connectionAttemptLimit) { break; } final long remainingTime = nextTry - Clock.currentTimeMillis(); logger.warning( String.format( "Unable to get a live cluster connection," + " retrying in %d ms, attempt %d of %d.", Math.max(0, remainingTime), attempt, connectionAttemptLimit)); if (remainingTime > 0) { try { Thread.sleep(remainingTime); } catch (InterruptedException e) { break; } } } throw new IllegalStateException("Unable to connect to any address in the config!", lastError); } private void fireConnectionEvent(boolean disconnected) { final LifecycleServiceImpl lifecycleService = (LifecycleServiceImpl) client.getLifecycleService(); final LifecycleState state = disconnected ?
LifecycleState.CLIENT_DISCONNECTED : LifecycleState.CLIENT_CONNECTED; lifecycleService.fireLifecycleEvent(state); } private Collection<InetSocketAddress> getConfigAddresses() { final List<InetSocketAddress> socketAddresses = new LinkedList<InetSocketAddress>(); for (String address : getClientConfig().getAddresses()) { socketAddresses.addAll(AddressHelper.getSocketAddresses(address)); } Collections.shuffle(socketAddresses); return socketAddresses; } private ClientConfig getClientConfig() { return client.getClientConfig(); } private class ManagerAuthenticator implements Authenticator { public void auth(Connection connection) throws AuthenticationException, IOException { final Object response = authenticate(connection, credentials, principal, true, true); principal = (ClientPrincipal) response; } } private class ClusterAuthenticator implements Authenticator { public void auth(Connection connection) throws AuthenticationException, IOException { authenticate(connection, credentials, principal, false, false); } } private Object authenticate( Connection connection, Credentials credentials, ClientPrincipal principal, boolean reAuth, boolean firstConnection) throws IOException { AuthenticationRequest auth = new AuthenticationRequest(credentials, principal); auth.setReAuth(reAuth); auth.setFirstConnection(firstConnection); final SerializationService serializationService = getSerializationService(); connection.write(serializationService.toData(auth)); final Data addressData = connection.read(); Address address = ErrorHandler.returnResultOrThrowException(serializationService.toObject(addressData)); connection.setRemoteEndpoint(address); final Data data = connection.read(); return ErrorHandler.returnResultOrThrowException(serializationService.toObject(data)); } public String membersString() { StringBuilder sb = new StringBuilder("\n\nMembers ["); final Collection<MemberImpl> members = getMemberList(); sb.append(members != null ? members.size() : 0); sb.append("] {"); if (members != null) { for (Member member : members) { sb.append("\n\t").append(member); } } sb.append("\n}\n"); return sb.toString(); } }
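// Hedged sketch (not from the original sources) of the pacing logic in connectToOne() above: each
// full pass over the candidate addresses counts as one attempt, and the next pass starts no earlier
// than attemptPeriodMillis after the previous one began. connect() and the address strings are
// hypothetical placeholders for firstConnection() and the configured cluster addresses.
import java.util.Arrays;
import java.util.List;

class RetryPacingSketch {
    static String connectToOne(List<String> addresses, int attemptLimit, long attemptPeriodMillis)
            throws InterruptedException {
        int attempt = 0;
        while (true) {
            long nextTry = System.currentTimeMillis() + attemptPeriodMillis;
            for (String address : addresses) {
                if (connect(address)) {
                    return address;
                }
            }
            if (attempt++ >= attemptLimit) {
                break;
            }
            long remaining = nextTry - System.currentTimeMillis();
            if (remaining > 0) {
                Thread.sleep(remaining); // pace the next attempt
            }
        }
        throw new IllegalStateException("Unable to connect to any address: " + addresses);
    }

    static boolean connect(String address) { // hypothetical connector; always fails here
        return false;
    }

    public static void main(String[] args) throws InterruptedException {
        try {
            connectToOne(Arrays.asList("127.0.0.1:5701", "127.0.0.1:5702"), 2, 1000);
        } catch (IllegalStateException expected) {
            System.out.println(expected.getMessage()); // all attempts exhausted
        }
    }
}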
public class StringStringSyntheticMapTest { private static final ILogger LOGGER = Logger.getLogger(StringStringSyntheticMapTest.class); private enum Operation { PUT, GET } // properties public String basename = StringStringSyntheticMapTest.class.getSimpleName(); public int keyLength = 10; public int valueLength = 10; public int keyCount = 10000; public int valueCount = 10000; public KeyLocality keyLocality = KeyLocality.RANDOM; public int minNumberOfMembers = 0; public double putProb = 0.1; // probes public Probe putProbe; public Probe getProbe; private final OperationSelectorBuilder<Operation> operationSelectorBuilder = new OperationSelectorBuilder<Operation>(); private HazelcastInstance hazelcastInstance; private SyntheticMap<String, String> map; private String[] keys; private String[] values; @Setup public void setUp(TestContext testContext) { hazelcastInstance = testContext.getTargetInstance(); HazelcastInstance targetInstance = testContext.getTargetInstance(); map = targetInstance.getDistributedObject(SyntheticMapService.SERVICE_NAME, "map-" + basename); operationSelectorBuilder .addOperation(Operation.PUT, putProb) .addDefaultOperation(Operation.GET); } @Teardown public void tearDown() { map.destroy(); LOGGER.info(getOperationCountInformation(hazelcastInstance)); } @Warmup(global = false) public void warmup() { waitClusterSize(LOGGER, hazelcastInstance, minNumberOfMembers); keys = generateStringKeys(keyCount, keyLength, keyLocality, hazelcastInstance); values = generateStrings(valueCount, valueLength); Random random = new Random(); for (String key : keys) { String value = values[random.nextInt(valueCount)]; map.put(key, value); } } @RunWithWorker public Worker createWorker() { return new Worker(); } private class Worker extends AbstractWorker<Operation> { public Worker() { super(operationSelectorBuilder); } @Override protected void timeStep(Operation operation) throws Exception { String key = randomKey(); switch (operation) { case PUT: String value = randomValue(); putProbe.started(); map.put(key, value); putProbe.done(); break; case GET: getProbe.started(); map.get(key); getProbe.done(); break; default: throw new UnsupportedOperationException(); } } private String randomKey() { return keys[randomInt(keys.length)]; } private String randomValue() { return values[randomInt(values.length)]; } } public static void main(String[] args) throws Exception { StringStringSyntheticMapTest test = new StringStringSyntheticMapTest(); new TestRunner<StringStringSyntheticMapTest>(test).run(); } }
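// Hedged illustration (not part of the test above) of the probability-driven operation mix that
// putProb configures: PUT is chosen with probability putProb, everything else defaults to GET.
// This mirrors what OperationSelectorBuilder does conceptually; the enum and selector below are
// local stand-ins, not the Simulator API.
import java.util.Random;

class OperationMixSketch {
    enum Op { PUT, GET }

    static Op select(Random random, double putProb) {
        return random.nextDouble() < putProb ? Op.PUT : Op.GET;
    }

    public static void main(String[] args) {
        Random random = new Random(42);
        int puts = 0;
        int total = 100000;
        for (int i = 0; i < total; i++) {
            if (select(random, 0.1) == Op.PUT) {
                puts++;
            }
        }
        // With putProb = 0.1, roughly 10% of operations should be PUTs.
        System.out.printf("put ratio: %.3f%n", puts / (double) total);
    }
}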
/** * Small facade to bring together container's pooling mechanism and other vendor-specific calls with * the real implementation classes of this resource adapter */ public class ConnectionFactoryImpl implements HazelcastConnectionFactory { /** identity generator */ private static final AtomicInteger ID_GEN = new AtomicInteger(); /** class LOGGER */ private static final ILogger LOGGER = Logger.getLogger("com.hazelcast.jca"); /** this identity */ private static final long serialVersionUID = -5909363703528221650L; /** Access to this resource adapter infrastructure */ private ManagedConnectionFactoryImpl mcf; /** Container's connection manager - i.e. for pooling */ private ConnectionManager cm; /** JNDI reference - not used */ private Reference ref; private transient int id; public ConnectionFactoryImpl() { setId(ID_GEN.incrementAndGet()); } public ConnectionFactoryImpl(ManagedConnectionFactoryImpl mcf, ConnectionManager cm) { this(); this.mcf = mcf; this.cm = cm; } /* (non-Javadoc) * @see com.hazelcast.jca.HazelcastConnectionFactory#getConnection() */ public HazelcastConnection getConnection() throws ResourceException { LOGGER.finest("getConnection"); return getConnection(null); } /* (non-Javadoc) * @see com.hazelcast.jca.HazelcastConnectionFactory#getConnection(javax.resource.cci.ConnectionSpec) */ public HazelcastConnection getConnection(ConnectionSpec connSpec) throws ResourceException { if (LOGGER.isFinestEnabled()) { LOGGER.finest("getConnection spec: " + connSpec); } return (HazelcastConnection) cm.allocateConnection(mcf, null); } /* (non-Javadoc) * @see javax.resource.cci.ConnectionFactory#getMetaData() */ public ResourceAdapterMetaData getMetaData() throws ResourceException { return new ConnectionFactoryMetaData(); } /* (non-Javadoc) * @see javax.resource.cci.ConnectionFactory#getRecordFactory() */ public RecordFactory getRecordFactory() throws ResourceException { return null; } /* (non-Javadoc) * @see javax.resource.Referenceable#setReference(javax.naming.Reference) */ public void setReference(Reference ref) { this.ref = ref; } /* (non-Javadoc) * @see javax.naming.Referenceable#getReference() */ public Reference getReference() throws NamingException { return ref; } @Override public String toString() { return "hazelcast.ConnectionFactoryImpl [" + id + "]"; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + id; return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } ConnectionFactoryImpl other = (ConnectionFactoryImpl) obj; if (id != other.id) { return false; } return true; } public void setId(int id) { this.id = id; } }
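// Hedged usage sketch (not in the original sources): how a container-managed component would
// typically obtain a HazelcastConnection through the factory above via JNDI. The JNDI name
// "java:comp/env/HazelcastCF" is a hypothetical binding chosen by the deployer, and the
// connection accessors are assumed from the resource adapter's public interfaces.
import com.hazelcast.jca.HazelcastConnection;
import com.hazelcast.jca.HazelcastConnectionFactory;
import javax.naming.InitialContext;

class JcaUsageSketch {
    void doWork() throws Exception {
        InitialContext ctx = new InitialContext();
        HazelcastConnectionFactory factory =
                (HazelcastConnectionFactory) ctx.lookup("java:comp/env/HazelcastCF");
        HazelcastConnection connection = factory.getConnection();
        try {
            // use the connection handle to reach distributed structures here
        } finally {
            connection.close(); // return the handle to the container's pool
        }
    }
}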