  private void invokeRunWithWorkerMethod() throws Exception {
    bindOptionalProperty(this, testCase, OptionalTestProperties.THREAD_COUNT.getPropertyName());

    if (threadCount <= 0) {
      return;
    }

    LOGGER.info(
        format("Spawning %d worker threads for test %s", threadCount, testContext.getTestId()));

    // create a single worker instance up front, only to discover its concrete class via reflection
    Class<?> workerClass = invokeMethod(testClassInstance, runWithWorkerMethod).getClass();

    Field testContextField = getField(workerClass, "testContext", TestContext.class);
    Field workerProbeField = getField(workerClass, "workerProbe", Probe.class);

    Probe probe = null;
    if (workerProbeField != null) {
      // create one probe per test and inject it in all worker instances of the test
      probe = getOrCreateProbe(testContext.getTestId() + "WorkerProbe", workerProbeField);
    }

    // spawn all workers and wait for their completion
    IWorker worker = spawnWorkerThreads(testContextField, workerProbeField, probe);

    // call the afterCompletion method on a single instance of the worker
    if (worker != null) {
      worker.afterCompletion();
    }
  }
  @Setup
  public void setup(TestContext testContext) throws Exception {
    this.testContext = testContext;
    HazelcastInstance targetInstance = testContext.getTargetInstance();

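    // cluster-wide counters and the shared work queue, namespaced with the test id
    // so that concurrently running tests do not interfere with each other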
    produced = targetInstance.getAtomicLong(basename + "-" + testContext.getTestId() + ":Produced");
    consumed = targetInstance.getAtomicLong(basename + "-" + testContext.getTestId() + ":Consumed");
    workQueue = targetInstance.getQueue(basename + "-" + testContext.getTestId() + ":WorkQueue");
  }
  @Setup
  public void setUp(TestContext testContext) {
    hazelcastInstance = testContext.getTargetInstance();
    map = hazelcastInstance.getDistributedObject(SyntheticMapService.SERVICE_NAME, "map-" + basename);

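    // weighted operation selection: PUT is chosen with probability putProb,
    // the remaining probability falls through to the default GET operation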
    operationSelectorBuilder
        .addOperation(Operation.PUT, putProb)
        .addDefaultOperation(Operation.GET);
  }
  @Setup
  public void setup(TestContext testContext) {
    HazelcastInstance hazelcastInstance = testContext.getTargetInstance();
    results = hazelcastInstance.getList(basename);
    listeners = hazelcastInstance.getList(basename + "listeners");

    cache = CacheUtils.getCache(hazelcastInstance, basename);
    listener = new ICacheEntryListener<Integer, Long>();
    filter = new ICacheEntryEventFilter<Integer, Long>();

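    // JCache listener registration: factories for the listener and the event filter,
    // oldValueRequired=false, isSynchronous=syncEvents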
    CacheEntryListenerConfiguration<Integer, Long> config =
        new MutableCacheEntryListenerConfiguration<Integer, Long>(
            FactoryBuilder.factoryOf(listener),
            FactoryBuilder.factoryOf(filter),
            false,
            syncEvents);
    cache.registerCacheEntryListener(config);

    builder
        .addOperation(Operation.PUT, put)
        .addOperation(Operation.PUT_EXPIRY, putExpiry)
        .addOperation(Operation.PUT_EXPIRY_ASYNC, putAsyncExpiry)
        .addOperation(Operation.GET_EXPIRY, getExpiry)
        .addOperation(Operation.GET_EXPIRY_ASYNC, getAsyncExpiry)
        .addOperation(Operation.REMOVE, remove)
        .addOperation(Operation.REPLACE, replace);
  }
 @Run
 public void run() {
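   // spawn one Worker per configured thread and block until all of them have finished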
   ThreadSpawner spawner = new ThreadSpawner(testContext.getTestId());
   for (int i = 0; i < threadCount; i++) {
     spawner.spawn(new Worker());
   }
   spawner.awaitCompletion();
 }
  @Setup
  public void setup(TestContext testContext) {
    HazelcastInstance hazelcastInstance = testContext.getTargetInstance();
    resultsPerWorker = hazelcastInstance.getList(basename + ":ResultMap");

    CacheManager cacheManager = createCacheManager(hazelcastInstance);
    cache = cacheManager.getCache(basename);
  }
  @Setup
  public void setUp(TestContext testContext) {
    map = testContext.getTargetInstance().getMap(basename);

    operationSelectorBuilder
        .addOperation(Operation.PUT, putProbability)
        .addDefaultOperation(Operation.QUERY);
  }
  @Setup
  public void setup(TestContext testContext) throws Exception {
    hazelcastInstance = testContext.getTargetInstance();
    list = hazelcastInstance.getList(basename);

    globalIncrements = hazelcastInstance.getList(basename + "res");
    globalCounter = hazelcastInstance.getList(basename + "report");
  }
 @Run
 public void run() {
   ThreadSpawner spawner = new ThreadSpawner(testContext.getTestId());
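   // start the producers and consumers under distinct thread names, then wait for both groups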
   for (int k = 0; k < producerCount; k++) {
     spawner.spawn("ProducerThread", new Producer(k));
   }
   for (int k = 0; k < consumerCount; k++) {
     spawner.spawn("ConsumerThread", new Consumer(k));
   }
   spawner.awaitCompletion();
 }
 @Run
 public void run() {
   ThreadSpawner spawner = new ThreadSpawner(testContext.getTestId());
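   // integrity threads are kept in a field, so they stay reachable after the run
   // (e.g. for later inspection or verification)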
   for (int i = 0; i < mapIntegrityThreadCount; i++) {
     integrityThreads[i] = new MapIntegrityThread();
     spawner.spawn(integrityThreads[i]);
   }
   for (int i = 0; i < stressThreadCount; i++) {
     spawner.spawn(new StressThread());
   }
   spawner.awaitCompletion();
 }
  @Setup
  public void setup(TestContext testContext) throws Exception {
    this.testContext = testContext;
    targetInstance = testContext.getTargetInstance();
    testId = testContext.getTestId();

    integrityMap = targetInstance.getMap(basename + "Integrity");
    stressMap = targetInstance.getMap(basename + "Stress");

    integrityThreads = new MapIntegrityThread[mapIntegrityThreadCount];
    value = new byte[valueSize];

    Random random = new Random();
    random.nextBytes(value);

    if (mapLoad && isMemberNode(targetInstance)) {
      PartitionService partitionService = targetInstance.getPartitionService();
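      // block until every partition has an owner, i.e. the cluster has fully formed,
      // before deciding which keys belong to this member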
      final Set<Partition> partitionSet = partitionService.getPartitions();
      for (Partition partition : partitionSet) {
        while (partition.getOwner() == null) {
          Thread.sleep(1000);
        }
      }
      LOGGER.info(testId + ": " + partitionSet.size() + " partitions");

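      // each member pre-loads only the keys it owns, so every key is written exactly once
      // across the cluster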
      Member localMember = targetInstance.getCluster().getLocalMember();
      for (int i = 0; i < totalIntegrityKeys; i++) {
        Partition partition = partitionService.getPartition(i);
        if (localMember.equals(partition.getOwner())) {
          integrityMap.put(i, value);
        }
      }
      LOGGER.info(
          testId + ": integrityMap=" + integrityMap.getName() + " size=" + integrityMap.size());

      Config config = targetInstance.getConfig();
      MapConfig mapConfig = config.getMapConfig(integrityMap.getName());
      LOGGER.info(testId + ": " + mapConfig);
    }
  }
  @Setup
  public void setUp(TestContext testContext) throws Exception {
    HazelcastInstance targetInstance = testContext.getTargetInstance();
    map = targetInstance.getMap(basename);
    operationCounterList = targetInstance.getList(basename + "OperationCounter");

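    // no default operation is registered, so the configured probabilities must cover
    // the full range (sum to 1)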
    operationSelectorBuilder
        .addOperation(Operation.PREDICATE_BUILDER, predicateBuilderProb)
        .addOperation(Operation.SQL_STRING, sqlStringProb)
        .addOperation(Operation.PAGING_PREDICATE, pagePredicateProb)
        .addOperation(Operation.UPDATE_EMPLOYEE, updateEmployeeProb)
        .addOperation(Operation.DESTROY_MAP, destroyProb);
  }
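
  /**
   * Creates one worker instance per thread via the test's run-with-worker method, injects the
   * shared testContext and probe into the fields discovered earlier, runs all workers to
   * completion, and returns the last spawned instance (the caller invokes afterCompletion()
   * on it).
   */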
  private IWorker spawnWorkerThreads(Field testContextField, Field workerProbeField, Probe probe)
      throws Exception {
    IWorker worker = null;

    ThreadSpawner spawner = new ThreadSpawner(testContext.getTestId());
    for (int i = 0; i < threadCount; i++) {
      worker = invokeMethod(testClassInstance, runWithWorkerMethod);

      if (testContextField != null) {
        setFieldValue(worker, testContextField, testContext);
      }
      if (workerProbeField != null) {
        setFieldValue(worker, workerProbeField, probe);
      }

      bindOptionalProperty(
          worker, testCase, OptionalTestProperties.LOG_FREQUENCY.getPropertyName());

      spawner.spawn(worker);
    }
    spawner.awaitCompletion();

    return worker;
  }
 @Setup
 public void setup(TestContext testContext) throws Exception {
   this.testContext = testContext;
   map = testContext.getTargetInstance().getMap(basename);
 }
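
  /*
   * A minimal sketch (an assumption, not code from this module) of a test whose worker factory
   * the reflective plumbing above drives: the run-with-worker method is invoked once per thread,
   * the testContext/workerProbe fields are injected, and afterCompletion() runs once on the
   * last instance. Names like Worker, keyCount and AbstractMonotonicWorker are illustrative.
   *
   *   @RunWithWorker
   *   public IWorker createWorker() {
   *     return new Worker();
   *   }
   *
   *   private class Worker extends AbstractMonotonicWorker {
   *     @Override
   *     protected void timeStep() {
   *       map.put(randomInt(keyCount), System.nanoTime());
   *     }
   *   }
   */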