private static void sleep() {
   try {
     Thread.sleep(Timeout.standardTimeoutMillis() / 4);
   } catch (InterruptedException e) {
     throw new OpenGammaRuntimeException("interrupted", e);
   }
 }
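  /**
   * Waits on the barrier using the standard test timeout, wrapping any checked failure
   * (interruption, timeout or a broken barrier) in a runtime exception.
   */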
 private static void join(final CyclicBarrier barrier) {
   try {
     barrier.await(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
   } catch (Exception e) {
     throw new OpenGammaRuntimeException("interrupted", e);
   }
 }
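Several of the FireHose examples below rely on a connect(...) helper from the original test class that is not reproduced here. Roughly, it registers a listener with the server and copies every LiveDataValueUpdateBean the server distributes onto a BlockingQueue of the requested capacity, which the tests then poll with a timeout. The sketch below illustrates only that bridging idea; ServerUpdateListener and connectSketch are hypothetical names, not the actual OpenGamma API.
  // Sketch only - not the original helper and not the real FireHoseLiveDataServer hook.
  interface ServerUpdateListener {
    void updateReceived(LiveDataValueUpdateBean update);
  }

  private static BlockingQueue<LiveDataValueUpdateBean> connectSketch(final int capacity) {
    final BlockingQueue<LiveDataValueUpdateBean> updates =
        new LinkedBlockingQueue<LiveDataValueUpdateBean>(capacity);
    final ServerUpdateListener listener = new ServerUpdateListener() {
      @Override
      public void updateReceived(final LiveDataValueUpdateBean update) {
        try {
          // With capacity 1 (as in testRapidUpdates) this put blocks the distributor until the
          // test has polled the previous value, which is what that test's comments rely on.
          updates.put(update);
        } catch (InterruptedException e) {
          throw new OpenGammaRuntimeException("interrupted", e);
        }
      }
    };
    // The real helper would now register 'listener' with the FireHoseLiveDataServer instance.
    return updates;
  }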
Example 3
 @Test(expectedExceptions = {OpenGammaRuntimeException.class})
 public void testSnapshotTimeoutOperation() {
   final FireHose fireHose = new FireHose();
   final FireHoseLiveDataServer liveDataServer =
       new FireHoseLiveDataServer(ExternalSchemes.SURF, fireHose);
   liveDataServer.setMarketDataTimeout(Timeout.standardTimeoutMillis() / 4, TimeUnit.MILLISECONDS);
   liveDataServer.start();
   try {
     liveDataServer.doSnapshot("Foo");
   } finally {
     liveDataServer.stop();
   }
 }
Example 4
 public void testMissingSubscription() throws InterruptedException {
   final FireHose fireHose = new FireHose();
   final FireHoseLiveDataServer liveDataServer =
       new FireHoseLiveDataServer(ExternalSchemes.SURF, fireHose);
   final BlockingQueue<LiveDataValueUpdateBean> updates =
       connect(liveDataServer, Integer.MAX_VALUE);
   liveDataServer.start();
   try {
     assertNotNull(liveDataServer.subscribe("Foo"));
     final LiveDataValueUpdateBean update =
         updates.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
     assertNull(update);
   } finally {
     liveDataServer.stop();
   }
 }
Example 6
 public void testBasicSubscription() throws InterruptedException {
   final FireHose fireHose = new FireHose();
   final FireHoseLiveDataServer liveDataServer =
       new FireHoseLiveDataServer(ExternalSchemes.SURF, fireHose);
   final BlockingQueue<LiveDataValueUpdateBean> updates =
       connect(liveDataServer, Integer.MAX_VALUE);
   liveDataServer.start();
   try {
     final MutableFudgeMsg msg = FudgeContext.GLOBAL_DEFAULT.newMessage();
     msg.add("X", "Y");
     fireHose.storeValue("Foo", msg);
     assertNotNull(liveDataServer.subscribe("Foo"));
     final LiveDataValueUpdateBean update =
         updates.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
     assertNotNull(update);
     assertEquals(update.getFields().getString("X"), "Y");
   } finally {
     liveDataServer.stop();
   }
 }
Example 7
 @Test(invocationCount = 3, successPercentage = 25)
 public void testRapidUpdates() throws InterruptedException {
    // If the live data server abstraction is slow to consume the fire hose, updates will be
    // lost and the most recent values should win.
   final FireHose fireHose = new FireHose();
   final FireHoseLiveDataServer liveDataServer =
       new FireHoseLiveDataServer(ExternalSchemes.SURF, fireHose);
   final BlockingQueue<LiveDataValueUpdateBean> updates = connect(liveDataServer, 1);
   liveDataServer.start();
   try {
     FireHoseLiveDataServer.getExecutorService()
         .submit(
             new Runnable() {
               @Override
               public void run() {
                 try {
                   Thread.sleep(Timeout.standardTimeoutMillis() / 4);
                   s_logger.debug("Generating updates");
                   for (int i = 0; i < 100; i++) {
                     final MutableFudgeMsg msg = FudgeContext.GLOBAL_DEFAULT.newMessage();
                     msg.add("X", i);
                     fireHose.storeValue("Foo", msg);
                     if (i == 50) {
                       s_logger.debug("Pausing at 50");
                       Thread.sleep(Timeout.standardTimeoutMillis());
                     }
                   }
                   s_logger.debug("Updates complete");
                 } catch (InterruptedException e) {
                   throw new OpenGammaRuntimeException("Interrupted", e);
                 }
               }
             });
     liveDataServer.subscribe("Foo");
      // An early value (<= 50) will be written to the queue.
      // A second value (<= 50) will be blocked until we have finished polling, and then written.
      // A third value (<= 50) will be blocked while we are paused.
      // During that pause, values up to 50 will be produced, leaving 50 ready for delivery.
      // We will thus see four updates less than or equal to 50.
      // We will then see one or more values up to 99.
     int low = 0;
     int high = 0;
     do {
       final LiveDataValueUpdateBean update =
           updates.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
       s_logger.info("Got update {}", update);
       final int v = update.getFields().getInt("X");
       if (v <= 50) {
         low++;
         if (low == 1) {
           Thread.sleep(Timeout.standardTimeoutMillis() / 2);
         }
       } else {
         high++;
         if (v == 99) {
           break;
         }
       }
     } while (true);
     s_logger.info("Low = {}, High = {}", low, high);
     assertEquals(low, 4);
     assertTrue((high > 0) && (high < 50));
   } finally {
     liveDataServer.stop();
   }
 }
/** Tests ViewClient */
@Test
public class ViewClientTest {

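  // Maximum time to wait for asynchronous results and callbacks before a test assertion fails.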
  private static final long TIMEOUT = Timeout.standardTimeoutMillis();

  @Test
  public void testSingleViewMultipleClients() {
    ViewProcessorTestEnvironment env = new ViewProcessorTestEnvironment();
    env.init();
    ViewProcessorImpl vp = env.getViewProcessor();
    vp.start();

    ViewClient client1 = vp.createViewClient(ViewProcessorTestEnvironment.TEST_USER);
    assertNotNull(client1.getUniqueId());

    client1.attachToViewProcess(
        env.getViewDefinition().getUniqueId(), ExecutionOptions.infinite(MarketData.live()));
    ViewProcessImpl client1Process = env.getViewProcess(vp, client1.getUniqueId());
    assertTrue(client1Process.getState() == ViewProcessState.RUNNING);

    ViewClient client2 = vp.createViewClient(ViewProcessorTestEnvironment.TEST_USER);
    assertNotNull(client2.getUniqueId());
    assertFalse(client1.getUniqueId().equals(client2.getUniqueId()));

    client2.attachToViewProcess(
        env.getViewDefinition().getUniqueId(), ExecutionOptions.infinite(MarketData.live()));
    ViewProcessImpl client2Process = env.getViewProcess(vp, client2.getUniqueId());
    assertEquals(client1Process, client2Process);
    assertTrue(client2Process.getState() == ViewProcessState.RUNNING);

    client1.detachFromViewProcess();
    assertTrue(client2Process.getState() == ViewProcessState.RUNNING);

    client2.detachFromViewProcess();
    assertTrue(client2Process.getState() == ViewProcessState.TERMINATED);

    client1.shutdown();
    client2.shutdown();
  }

  @Test
  public void testCascadingShutdown() {
    ViewProcessorTestEnvironment env = new ViewProcessorTestEnvironment();
    env.init();
    ViewProcessorImpl vp = env.getViewProcessor();
    vp.start();

    ViewClient client1 = vp.createViewClient(ViewProcessorTestEnvironment.TEST_USER);
    client1.attachToViewProcess(
        env.getViewDefinition().getUniqueId(), ExecutionOptions.infinite(MarketData.live()));

    ViewClient client2 = vp.createViewClient(ViewProcessorTestEnvironment.TEST_USER);
    client2.attachToViewProcess(
        env.getViewDefinition().getUniqueId(), ExecutionOptions.infinite(MarketData.live()));

    ViewProcessImpl view = env.getViewProcess(vp, client1.getUniqueId());

    vp.stop();

    assertFalse(vp.isRunning());
    assertFalse(view.isRunning());
    assertTrue(view.getState() == ViewProcessState.TERMINATED);

    assertFalse(client1.isAttached());
    assertFalse(client2.isAttached());

    client1.shutdown();
    client2.shutdown();
  }

  @Test
  public void testComputationResultsFlow() throws InterruptedException {
    ViewProcessorTestEnvironment env = new ViewProcessorTestEnvironment();
    SynchronousInMemoryLKVSnapshotProvider marketDataProvider =
        new SynchronousInMemoryLKVSnapshotProvider();
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 0);
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive2(), (byte) 0);
    env.setMarketDataProvider(marketDataProvider);
    env.init();

    ViewProcessorImpl vp = env.getViewProcessor();
    vp.start();

    ViewClient client = vp.createViewClient(ViewProcessorTestEnvironment.TEST_USER);
    client.setFragmentResultMode(ViewResultMode.FULL_ONLY);
    TestViewResultListener resultListener = new TestViewResultListener();
    client.setResultListener(resultListener);

    // Client not attached - should not have been listening to anything that might have been
    // going on
    assertEquals(0, resultListener.getQueueSize());

    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 1);
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive2(), (byte) 2);

    assertEquals(0, resultListener.getQueueSize());

    client.attachToViewProcess(
        env.getViewDefinition().getUniqueId(), ExecutionOptions.infinite(MarketData.live()));

    ViewProcessImpl viewProcess = env.getViewProcess(vp, client.getUniqueId());
    assertTrue(viewProcess.getState() == ViewProcessState.RUNNING);

    resultListener.assertViewDefinitionCompiled(TIMEOUT);
    resultListener.assertCycleInitiated(TIMEOUT);
    ViewResultModel result1Fragment =
        resultListener.getCycleFragmentCompleted(TIMEOUT).getFullFragment();
    assertNotNull(result1Fragment);
    ViewComputationResultModel result1 = resultListener.getCycleCompleted(TIMEOUT).getFullResult();
    assertNotNull(result1);

    Map<ValueRequirement, Object> expected = new HashMap<ValueRequirement, Object>();
    expected.put(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 1);
    expected.put(ViewProcessorTestEnvironment.getPrimitive2(), (byte) 2);
    assertComputationResult(expected, env.getCalculationResult(result1));
    assertComputationResult(expected, env.getCalculationResult(result1Fragment));
    assertTrue(client.isResultAvailable());
    assertEquals(result1, client.getLatestResult());

    client.pause();

    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 3);
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive2(), (byte) 4);

    env.getCurrentComputationJob(viewProcess)
        .marketDataChanged(); // Need to get it to perform another cycle

    // Should have been merging results received in the meantime
    client.resume();
    resultListener.assertCycleInitiated(TIMEOUT);
    resultListener.assertCycleFragmentCompleted(TIMEOUT);
    ViewComputationResultModel result2 = resultListener.getCycleCompleted(TIMEOUT).getFullResult();

    expected = new HashMap<ValueRequirement, Object>();
    expected.put(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 3);
    expected.put(ViewProcessorTestEnvironment.getPrimitive2(), (byte) 4);
    assertComputationResult(expected, env.getCalculationResult(result2));
  }

  @Test
  public void testDeltaResults() throws InterruptedException {
    ViewProcessorTestEnvironment env = new ViewProcessorTestEnvironment();
    SynchronousInMemoryLKVSnapshotProvider marketDataProvider =
        new SynchronousInMemoryLKVSnapshotProvider();
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), 0);
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive2(), 0);
    env.setMarketDataProvider(marketDataProvider);
    env.init();

    ViewProcessorImpl vp = env.getViewProcessor();
    vp.start();

    ViewClient client = vp.createViewClient(ViewProcessorTestEnvironment.TEST_USER);
    client.setResultMode(ViewResultMode.DELTA_ONLY);
    client.setFragmentResultMode(ViewResultMode.FULL_ONLY);

    TestViewResultListener resultListener = new TestViewResultListener();
    client.setResultListener(resultListener);

    // Client not attached - should not have been listening to anything that might have been
    // going on
    assertEquals(0, resultListener.getQueueSize());

    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), 1);
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive2(), 2);

    assertEquals(0, resultListener.getQueueSize());

    client.attachToViewProcess(
        env.getViewDefinition().getUniqueId(), ExecutionOptions.infinite(MarketData.live()));
    resultListener.assertViewDefinitionCompiled(TIMEOUT);
    resultListener.assertCycleInitiated(TIMEOUT);
    resultListener.assertCycleFragmentCompleted(TIMEOUT);
    ViewDeltaResultModel result1 = resultListener.getCycleCompleted(TIMEOUT).getDeltaResult();
    assertNotNull(result1);

    Map<ValueRequirement, Object> expected = new HashMap<ValueRequirement, Object>();
    expected.put(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 1);
    expected.put(ViewProcessorTestEnvironment.getPrimitive2(), (byte) 2);
    assertComputationResult(expected, env.getCalculationResult(result1));

    client.pause();

    // Just update one live data value, and only this one value should end up in the delta
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), 3);

    assertEquals(0, resultListener.getQueueSize());
    ViewProcessImpl viewProcess = env.getViewProcess(vp, client.getUniqueId());
    env.getCurrentComputationJob(viewProcess)
        .marketDataChanged(); // Need to get it to perform another cycle

    // Should have been merging results received in the meantime
    client.resume();
    resultListener.assertCycleInitiated(TIMEOUT);
    resultListener.assertCycleFragmentCompleted(TIMEOUT);
    ViewDeltaResultModel result2 = resultListener.getCycleCompleted(TIMEOUT).getDeltaResult();

    expected = new HashMap<ValueRequirement, Object>();
    expected.put(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 3);
    assertComputationResult(expected, env.getCalculationResult(result2));
  }

  @Test
  public void testStates() throws InterruptedException {
    ViewProcessorTestEnvironment env = new ViewProcessorTestEnvironment();
    SynchronousInMemoryLKVSnapshotProvider marketDataProvider =
        new SynchronousInMemoryLKVSnapshotProvider();
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 0);
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive2(), (byte) 0);
    env.setMarketDataProvider(marketDataProvider);
    env.init();

    ViewProcessorImpl vp = env.getViewProcessor();
    vp.start();

    ViewClient client1 = vp.createViewClient(ViewProcessorTestEnvironment.TEST_USER);
    client1.setFragmentResultMode(ViewResultMode.FULL_ONLY);
    TestViewResultListener client1ResultListener = new TestViewResultListener();
    client1.setResultListener(client1ResultListener);

    assertEquals(0, client1ResultListener.getQueueSize());

    client1.attachToViewProcess(
        env.getViewDefinition().getUniqueId(), ExecutionOptions.infinite(MarketData.live()));

    // Wait for first computation cycle
    client1ResultListener.assertViewDefinitionCompiled(TIMEOUT);
    client1ResultListener.expectNextCall(CycleInitiatedCall.class, TIMEOUT);
    client1ResultListener.assertCycleFragmentCompleted(TIMEOUT);
    client1ResultListener.assertCycleCompleted(TIMEOUT);

    ViewClient client2 = vp.createViewClient(ViewProcessorTestEnvironment.TEST_USER);
    client2.setFragmentResultMode(ViewResultMode.FULL_ONLY);
    TestViewResultListener client2ResultListener = new TestViewResultListener();
    client2.setResultListener(client2ResultListener);

    assertEquals(0, client2ResultListener.getQueueSize());
    client2.attachToViewProcess(
        env.getViewDefinition().getUniqueId(), ExecutionOptions.infinite(MarketData.live()));

    // Initial result should be pushed through
    client2ResultListener.assertViewDefinitionCompiled(TIMEOUT);
    client2ResultListener.assertCycleCompleted(TIMEOUT);

    ViewProcessImpl viewProcess1 = env.getViewProcess(vp, client1.getUniqueId());
    ViewProcessImpl viewProcess2 = env.getViewProcess(vp, client2.getUniqueId());
    assertEquals(viewProcess1, viewProcess2);

    client1.pause();
    client1ResultListener.assertNoCalls(TIMEOUT);

    // Now client 1 is paused, so any changes should be batched.
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 1);
    env.getCurrentComputationJob(viewProcess1).marketDataChanged();
    client2ResultListener.assertCycleInitiated(TIMEOUT);
    client2ResultListener.assertCycleFragmentCompleted(TIMEOUT);
    client2ResultListener.assertCycleCompleted(TIMEOUT);
    assertEquals(0, client2ResultListener.getQueueSize());
    client1ResultListener.assertNoCalls(TIMEOUT);

    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 2);
    env.getCurrentComputationJob(viewProcess1).marketDataChanged();
    client2ResultListener.assertCycleInitiated(TIMEOUT);
    client2ResultListener.assertCycleFragmentCompleted(TIMEOUT);
    client2ResultListener.assertCycleCompleted(TIMEOUT);
    assertEquals(0, client2ResultListener.getQueueSize());
    client1ResultListener.assertNoCalls(TIMEOUT);

    // Resuming should release the most recent result to the client
    client1.resume();
    client1ResultListener.getCycleInitiated(TIMEOUT).getCycleInfo();
    ViewComputationResultModel result2Fragment =
        client1ResultListener.getCycleFragmentCompleted(TIMEOUT).getFullFragment();
    ViewComputationResultModel result2 =
        client1ResultListener.getCycleCompleted(TIMEOUT).getFullResult();
    Map<ValueRequirement, Object> expected = new HashMap<ValueRequirement, Object>();
    expected.put(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 2);
    expected.put(ViewProcessorTestEnvironment.getPrimitive2(), (byte) 0);
    assertComputationResult(expected, env.getCalculationResult(result2Fragment));
    assertComputationResult(expected, env.getCalculationResult(result2));

    // Changes should now propagate straight away to both listeners
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 3);
    env.getCurrentComputationJob(viewProcess1).marketDataChanged();
    client1ResultListener.assertCycleInitiated(TIMEOUT);
    client2ResultListener.assertCycleInitiated(TIMEOUT);
    client2ResultListener.assertCycleFragmentCompleted(TIMEOUT);
    client2ResultListener.assertCycleCompleted(TIMEOUT);
    ViewComputationResultModel result3Fragment =
        client1ResultListener.getCycleFragmentCompleted(TIMEOUT).getFullFragment();
    ViewComputationResultModel result3 =
        client1ResultListener.getCycleCompleted(TIMEOUT).getFullResult();
    expected = new HashMap<ValueRequirement, Object>();
    expected.put(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 3);
    expected.put(ViewProcessorTestEnvironment.getPrimitive2(), (byte) 0);
    assertComputationResult(expected, env.getCalculationResult(result3Fragment));
    assertComputationResult(expected, env.getCalculationResult(result3));

    // Pause results again and we should be back to merging both whole cycle results and fragments
    client1.pause();
    client2ResultListener.assertNoCalls(TIMEOUT);
    client1ResultListener.assertNoCalls(TIMEOUT);

    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive2(), (byte) 1);
    env.getCurrentComputationJob(viewProcess1).marketDataChanged();
    client2ResultListener.assertCycleInitiated(TIMEOUT);
    client2ResultListener.assertCycleFragmentCompleted(TIMEOUT);
    client2ResultListener.assertCycleCompleted(TIMEOUT);
    assertEquals(0, client2ResultListener.getQueueSize());
    client1ResultListener.assertNoCalls(TIMEOUT);

    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive2(), (byte) 2);
    env.getCurrentComputationJob(viewProcess1).marketDataChanged();
    client2ResultListener.assertCycleInitiated(TIMEOUT);
    client2ResultListener.assertCycleFragmentCompleted(TIMEOUT);
    client2ResultListener.assertCycleCompleted(TIMEOUT);
    assertEquals(0, client2ResultListener.getQueueSize());
    client1ResultListener.assertNoCalls(TIMEOUT);

    // Start results again
    client1.resume();
    client1ResultListener.assertCycleInitiated(TIMEOUT);
    ViewComputationResultModel result4Fragment =
        client1ResultListener.getCycleFragmentCompleted(TIMEOUT).getFullFragment();
    ViewComputationResultModel result4 =
        client1ResultListener.getCycleCompleted(TIMEOUT).getFullResult();
    assertEquals(0, client1ResultListener.getQueueSize());
    client2ResultListener.assertNoCalls(TIMEOUT);
    expected = new HashMap<ValueRequirement, Object>();
    expected.put(ViewProcessorTestEnvironment.getPrimitive1(), (byte) 3);
    expected.put(ViewProcessorTestEnvironment.getPrimitive2(), (byte) 2);
    assertComputationResult(expected, env.getCalculationResult(result4Fragment));
    assertComputationResult(expected, env.getCalculationResult(result4));

    client1.detachFromViewProcess();
    client2ResultListener.assertNoCalls(TIMEOUT);
    client1ResultListener.assertNoCalls(TIMEOUT);

    client1.shutdown();
    client2.shutdown();
  }

  @Test(expectedExceptions = IllegalStateException.class)
  public void testUseTerminatedClient() {
    ViewProcessorTestEnvironment env = new ViewProcessorTestEnvironment();
    env.init();

    ViewProcessorImpl vp = env.getViewProcessor();
    vp.start();

    ViewClient client = vp.createViewClient(ViewProcessorTestEnvironment.TEST_USER);
    client.attachToViewProcess(
        env.getViewDefinition().getUniqueId(), ExecutionOptions.infinite(MarketData.live()));

    ViewProcess viewProcess = env.getViewProcess(vp, client.getUniqueId());

    client.shutdown();

    assertEquals(ViewProcessState.TERMINATED, viewProcess.getState());

    client.pause();
  }

  @Test
  public void testChangeOfListeners() throws InterruptedException {
    ViewProcessorTestEnvironment env = new ViewProcessorTestEnvironment();
    SynchronousInMemoryLKVSnapshotProvider marketDataProvider =
        new SynchronousInMemoryLKVSnapshotProvider();
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), 0);
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive2(), 0);
    env.setMarketDataProvider(marketDataProvider);
    env.init();

    ViewProcessorImpl vp = env.getViewProcessor();
    vp.start();

    ViewClient client = vp.createViewClient(ViewProcessorTestEnvironment.TEST_USER);
    client.setFragmentResultMode(ViewResultMode.FULL_ONLY);
    TestViewResultListener resultListener1 = new TestViewResultListener();
    client.setResultListener(resultListener1);

    // Start live computation and collect the initial result
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), 2);

    client.attachToViewProcess(
        env.getViewDefinition().getUniqueId(), ExecutionOptions.infinite(MarketData.live()));
    ViewProcessImpl viewProcess = env.getViewProcess(vp, client.getUniqueId());
    assertEquals(ViewProcessState.RUNNING, viewProcess.getState());

    ViewComputationJob recalcJob = env.getCurrentComputationJob(viewProcess);
    resultListener1.assertViewDefinitionCompiled(TIMEOUT);
    resultListener1.assertCycleInitiated(TIMEOUT);
    resultListener1.assertCycleFragmentCompleted(TIMEOUT);
    resultListener1.assertCycleCompleted(TIMEOUT);
    assertEquals(0, resultListener1.getQueueSize());

    // Push through a second result
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), 3);
    recalcJob.marketDataChanged();
    resultListener1.assertCycleInitiated(TIMEOUT);
    resultListener1.assertCycleFragmentCompleted(TIMEOUT);
    resultListener1.assertCycleCompleted(TIMEOUT);
    assertEquals(0, resultListener1.getQueueSize());

    // Change listener
    TestViewResultListener resultListener2 = new TestViewResultListener();
    client.setResultListener(resultListener2);

    // Push through a result which should arrive at the new listener
    recalcJob.marketDataChanged();
    resultListener2.assertCycleInitiated(TIMEOUT);
    resultListener2.assertCycleFragmentCompleted(TIMEOUT);
    resultListener2.assertCycleCompleted(TIMEOUT);
    assertEquals(0, resultListener1.getQueueSize());
    assertEquals(0, resultListener2.getQueueSize());

    client.setResultListener(null);
    client.shutdown();
    assertEquals(ViewProcessState.TERMINATED, viewProcess.getState());

    vp.stop();
  }

  @Test
  public void testOldRecalculationThreadDies() throws InterruptedException {
    ViewProcessorTestEnvironment env = new ViewProcessorTestEnvironment();
    SynchronousInMemoryLKVSnapshotProvider marketDataProvider =
        new SynchronousInMemoryLKVSnapshotProvider();
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive1(), 0);
    marketDataProvider.addValue(ViewProcessorTestEnvironment.getPrimitive2(), 0);
    env.setMarketDataProvider(marketDataProvider);
    env.init();

    ViewProcessorImpl vp = env.getViewProcessor();
    vp.start();

    ViewClient client = vp.createViewClient(ViewProcessorTestEnvironment.TEST_USER);

    client.attachToViewProcess(
        env.getViewDefinition().getUniqueId(), ExecutionOptions.infinite(MarketData.live()));
    ViewProcessImpl viewProcess1 = env.getViewProcess(vp, client.getUniqueId());

    ViewComputationJob recalcJob1 = env.getCurrentComputationJob(viewProcess1);
    Thread recalcThread1 = env.getCurrentComputationThread(viewProcess1);
    assertFalse(recalcJob1.isTerminated());
    assertTrue(recalcThread1.isAlive());

    client.detachFromViewProcess();
    client.attachToViewProcess(
        env.getViewDefinition().getUniqueId(), ExecutionOptions.infinite(MarketData.live()));
    ViewProcessImpl viewProcess2 = env.getViewProcess(vp, client.getUniqueId());
    ViewComputationJob recalcJob2 = env.getCurrentComputationJob(viewProcess2);
    Thread recalcThread2 = env.getCurrentComputationThread(viewProcess2);

    assertFalse(viewProcess1 == viewProcess2);
    assertTrue(recalcJob1.isTerminated());
    assertFalse(recalcJob2.isTerminated());

    recalcThread1.join(TIMEOUT);
    assertFalse(recalcThread1.isAlive());
    assertTrue(recalcThread2.isAlive());

    vp.stop();

    assertTrue(recalcJob2.isTerminated());
  }

  private void assertComputationResult(
      Map<ValueRequirement, Object> expected, ViewCalculationResultModel result) {
    assertNotNull(result);
    Set<ValueRequirement> remaining = new HashSet<ValueRequirement>(expected.keySet());
    Collection<ComputationTargetSpecification> targets = result.getAllTargets();
    for (ComputationTargetSpecification target : targets) {
      Map<Pair<String, ValueProperties>, ComputedValue> values = result.getValues(target);
      for (Map.Entry<Pair<String, ValueProperties>, ComputedValue> value : values.entrySet()) {
        String valueName = value.getKey().getFirst();
        ValueRequirement requirement =
            new ValueRequirement(valueName, target.getType(), target.getUniqueId());
        assertTrue(expected.containsKey(requirement));

        assertEquals(expected.get(requirement), value.getValue().getValue());
        remaining.remove(requirement);
      }
    }
    assertEquals(Collections.emptySet(), remaining);
  }

  /**
   * Avoids the ConcurrentHashMap-based implementation of InMemoryLKVSnapshotProvider, where the LKV
   * map can appear to lag behind if accessed from a different thread immediately after a change.
   */
  private static class SynchronousInMemoryLKVSnapshotProvider extends AbstractMarketDataProvider
      implements MarketDataInjector, MarketDataAvailabilityProvider {

    private static final Logger s_logger =
        LoggerFactory.getLogger(SynchronousInMemoryLKVSnapshotProvider.class);

    private final Map<ValueRequirement, Object> _lastKnownValues =
        new HashMap<ValueRequirement, Object>();
    private final MarketDataPermissionProvider _permissionProvider =
        new PermissiveMarketDataPermissionProvider();

    @Override
    public void subscribe(UserPrincipal user, ValueRequirement valueRequirement) {
      subscribe(user, Collections.singleton(valueRequirement));
    }

    @Override
    public void subscribe(UserPrincipal user, Set<ValueRequirement> valueRequirements) {
      // No actual subscription to make, but we still need to acknowledge it.
      subscriptionSucceeded(valueRequirements);
    }

    @Override
    public void unsubscribe(UserPrincipal user, ValueRequirement valueRequirement) {}

    @Override
    public void unsubscribe(UserPrincipal user, Set<ValueRequirement> valueRequirements) {}

    // -----------------------------------------------------------------------
    @Override
    public MarketDataAvailabilityProvider getAvailabilityProvider() {
      return this;
    }

    @Override
    public MarketDataPermissionProvider getPermissionProvider() {
      return _permissionProvider;
    }

    // -----------------------------------------------------------------------
    @Override
    public boolean isCompatible(MarketDataSpecification marketDataSpec) {
      return true;
    }

    @Override
    public MarketDataSnapshot snapshot(MarketDataSpecification marketDataSpec) {
      synchronized (_lastKnownValues) {
        Map<ValueRequirement, Object> snapshotValues =
            new HashMap<ValueRequirement, Object>(_lastKnownValues);
        return new SynchronousInMemoryLKVSnapshot(snapshotValues);
      }
    }

    // -----------------------------------------------------------------------
    @Override
    public void addValue(ValueRequirement requirement, Object value) {
      s_logger.debug("Setting {} = {}", requirement, value);
      synchronized (_lastKnownValues) {
        _lastKnownValues.put(requirement, value);
      }
      // Don't notify listeners of the change - we'll kick off a computation cycle manually
      // in the tests
    }

    @Override
    public void addValue(ExternalId identifier, String valueName, Object value) {}

    @Override
    public void removeValue(ValueRequirement valueRequirement) {
      synchronized (_lastKnownValues) {
        _lastKnownValues.remove(valueRequirement);
      }
      // Don't notify listeners of the change - we'll kick off a computation cycle manually
      // in the tests
    }

    @Override
    public void removeValue(ExternalId identifier, String valueName) {}

    // -----------------------------------------------------------------------
    @Override
    public MarketDataAvailability getAvailability(final ValueRequirement requirement) {
      synchronized (_lastKnownValues) {
        return _lastKnownValues.containsKey(requirement)
            ? MarketDataAvailability.AVAILABLE
            : MarketDataAvailability.NOT_AVAILABLE;
      }
    }
  }

  private static class SynchronousInMemoryLKVSnapshot extends AbstractMarketDataSnapshot {

    private final Map<ValueRequirement, Object> _snapshot;
    private final Instant _snapshotTime = Instant.now();

    public SynchronousInMemoryLKVSnapshot(Map<ValueRequirement, Object> snapshot) {
      _snapshot = snapshot;
    }

    @Override
    public UniqueId getUniqueId() {
      return UniqueId.of(
          MARKET_DATA_SNAPSHOT_ID_SCHEME, "SynchronousInMemoryLKVSnapshot:" + getSnapshotTime());
    }

    @Override
    public Instant getSnapshotTimeIndication() {
      return _snapshotTime;
    }

    @Override
    public Instant getSnapshotTime() {
      return _snapshotTime;
    }

    @Override
    public Object query(ValueRequirement requirement) {
      return _snapshot.get(requirement);
    }
  }
}
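TestViewResultListener, used throughout ViewClientTest above, is not shown in these excerpts. Its assertXxx(TIMEOUT) and getXxx(TIMEOUT) helpers presumably record each callback on a queue and poll it with a timeout; a generic sketch of that pattern (hypothetical names, plain JDK only) is:
// Generic call-recording listener sketch: callbacks enqueue an event and tests poll with a
// timeout. An illustration of the pattern, not the actual OpenGamma test class. Uses
// java.util.concurrent.BlockingQueue, LinkedBlockingQueue and TimeUnit.
class RecordingListener {

  private final BlockingQueue<Object> _calls = new LinkedBlockingQueue<Object>();

  // Invoked by whatever callback interface the listener implements.
  public void record(Object call) {
    _calls.add(call);
  }

  public int getQueueSize() {
    return _calls.size();
  }

  // Waits up to timeoutMillis for the next recorded call and checks it has the expected type.
  public <T> T expectNextCall(Class<T> expectedType, long timeoutMillis) throws InterruptedException {
    Object call = _calls.poll(timeoutMillis, TimeUnit.MILLISECONDS);
    if (call == null) {
      throw new AssertionError("Expected " + expectedType.getSimpleName() + " but no call arrived");
    }
    if (!expectedType.isInstance(call)) {
      throw new AssertionError("Expected " + expectedType.getSimpleName() + " but got " + call);
    }
    return expectedType.cast(call);
  }
}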
public class CancelExecutionTest {

  private static final int JOB_SIZE = 100;
  private static final int JOB_FINISH_TIME = (int) Timeout.standardTimeoutMillis();
  private static final int SLEEP_TIME = JOB_FINISH_TIME / 10;
  private static final Logger s_logger = LoggerFactory.getLogger(CancelExecutionTest.class);

  @DataProvider(name = "executors")
  Object[][] data_executors() {
    return new Object[][] {
      {multipleNodeExecutorFactoryManyJobs()},
      {multipleNodeExecutorFactoryOneJob()},
      {new SingleNodeExecutorFactory()},
    };
  }

  private static MultipleNodeExecutorFactory multipleNodeExecutorFactoryOneJob() {
    final MultipleNodeExecutorFactory factory = new MultipleNodeExecutorFactory();
    factory.afterPropertiesSet();
    return factory;
  }

  private static MultipleNodeExecutorFactory multipleNodeExecutorFactoryManyJobs() {
    final MultipleNodeExecutorFactory factory = multipleNodeExecutorFactoryOneJob();
    factory.setMaximumJobItems(JOB_SIZE / 10);
    return factory;
  }

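  // Incremented by the mock function each time a graph node executes; jobFinished() compares it
  // against JOB_SIZE to detect that the whole job has completed.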
  private final AtomicInteger _functionCount = new AtomicInteger();

  private void sleep() {
    try {
      Thread.sleep(SLEEP_TIME);
    } catch (InterruptedException e) {
      // Preserve the interrupt status rather than swallowing the exception silently
      Thread.currentThread().interrupt();
    }
  }

  private Future<?> executeTestJob(DependencyGraphExecutorFactory<?> factory) {
    final InMemoryLKVMarketDataProvider marketDataProvider = new InMemoryLKVMarketDataProvider();
    final MarketDataProviderResolver marketDataProviderResolver =
        new SingleMarketDataProviderResolver(
            new SingletonMarketDataProviderFactory(marketDataProvider));
    final InMemoryFunctionRepository functionRepository = new InMemoryFunctionRepository();
    _functionCount.set(0);
    final MockFunction mockFunction =
        new MockFunction(new ComputationTarget("Foo")) {

          @Override
          public Set<ComputedValue> execute(
              FunctionExecutionContext executionContext,
              FunctionInputs inputs,
              ComputationTarget target,
              Set<ValueRequirement> desiredValues) {
            try {
              Thread.sleep(JOB_FINISH_TIME / (JOB_SIZE * 2));
            } catch (InterruptedException e) {
              throw new OpenGammaRuntimeException("Function interrupted", e);
            }
            _functionCount.incrementAndGet();
            return super.execute(executionContext, inputs, target, desiredValues);
          }
        };
    functionRepository.addFunction(mockFunction);
    final FunctionCompilationContext compilationContext = new FunctionCompilationContext();
    final CompiledFunctionService compilationService =
        new CompiledFunctionService(
            functionRepository, new CachingFunctionRepositoryCompiler(), compilationContext);
    compilationService.initialize();
    final FunctionResolver functionResolver = new DefaultFunctionResolver(compilationService);
    final MockSecuritySource securitySource = new MockSecuritySource();
    final MockPositionSource positionSource = new MockPositionSource();
    final ViewComputationCacheSource computationCacheSource =
        new InMemoryViewComputationCacheSource(FudgeContext.GLOBAL_DEFAULT);
    final FunctionInvocationStatisticsGatherer functionInvocationStatistics =
        new DiscardingInvocationStatisticsGatherer();
    final ViewProcessorQueryReceiver viewProcessorQueryReceiver = new ViewProcessorQueryReceiver();
    final ViewProcessorQuerySender viewProcessorQuerySender =
        new ViewProcessorQuerySender(InMemoryRequestConduit.create(viewProcessorQueryReceiver));
    final FunctionExecutionContext executionContext = new FunctionExecutionContext();
    final ComputationTargetResolver targetResolver =
        new DefaultComputationTargetResolver(securitySource, positionSource);
    final JobDispatcher jobDispatcher =
        new JobDispatcher(
            new LocalNodeJobInvoker(
                new LocalCalculationNode(
                    computationCacheSource,
                    compilationService,
                    executionContext,
                    targetResolver,
                    viewProcessorQuerySender,
                    Executors.newCachedThreadPool(),
                    functionInvocationStatistics)));
    final ViewPermissionProvider viewPermissionProvider = new DefaultViewPermissionProvider();
    final GraphExecutorStatisticsGathererProvider graphExecutorStatisticsProvider =
        new DiscardingGraphStatisticsGathererProvider();

    ViewDefinition viewDefinition = new ViewDefinition("TestView", UserPrincipal.getTestUser());
    viewDefinition.addViewCalculationConfiguration(
        new ViewCalculationConfiguration(viewDefinition, "default"));
    MockViewDefinitionRepository viewDefinitionRepository = new MockViewDefinitionRepository();
    viewDefinitionRepository.addDefinition(viewDefinition);

    final ViewProcessContext vpc =
        new ViewProcessContext(
            viewDefinitionRepository,
            viewPermissionProvider,
            marketDataProviderResolver,
            compilationService,
            functionResolver,
            positionSource,
            securitySource,
            new DefaultCachingComputationTargetResolver(
                new DefaultComputationTargetResolver(securitySource, positionSource),
                EHCacheUtils.createCacheManager()),
            computationCacheSource,
            jobDispatcher,
            viewProcessorQueryReceiver,
            factory,
            graphExecutorStatisticsProvider);
    final DependencyGraph graph = new DependencyGraph("Default");
    DependencyNode previous = null;
    for (int i = 0; i < JOB_SIZE; i++) {
      DependencyNode node = new DependencyNode(new ComputationTarget("Foo"));
      node.setFunction(mockFunction);
      if (previous != null) {
        node.addInputNode(previous);
      }
      graph.addDependencyNode(node);
      previous = node;
    }
    final Map<String, DependencyGraph> graphs = new HashMap<String, DependencyGraph>();
    graphs.put(graph.getCalculationConfigurationName(), graph);
    CompiledViewDefinitionWithGraphsImpl viewEvaluationModel =
        new CompiledViewDefinitionWithGraphsImpl(
            viewDefinition, graphs, new SimplePortfolio("Test Portfolio"), 0);
    ViewCycleExecutionOptions cycleOptions = new ViewCycleExecutionOptions();
    cycleOptions.setValuationTime(Instant.ofEpochMillis(1));
    cycleOptions.setMarketDataSpecification(new MarketDataSpecification());
    final SingleComputationCycle cycle =
        new SingleComputationCycle(
            UniqueId.of("Test", "Cycle1"),
            UniqueId.of("Test", "ViewProcess1"),
            vpc,
            viewEvaluationModel,
            cycleOptions,
            VersionCorrection.of(Instant.ofEpochMillis(1), Instant.ofEpochMillis(1)));
    return cycle.getDependencyGraphExecutor().execute(graph, cycle.getStatisticsGatherer());
  }

  private boolean jobFinished() {
    return _functionCount.get() == JOB_SIZE;
  }

  /** Allow the job to finish, then call {@link Future#get}. */
  @Test(dataProvider = "executors")
  public void testJobFinish(DependencyGraphExecutorFactory<?> factory) throws Exception {
    s_logger.info("testJobFinish");
    Future<?> job = executeTestJob(factory);
    assertNotNull(job);
    for (int i = 0; i < JOB_FINISH_TIME / SLEEP_TIME; i++) {
      if (jobFinished()) {
        job.get(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
        assertFalse(job.isCancelled());
        assertTrue(job.isDone());
        s_logger.info("Job finished in {}", i);
        return;
      }
      sleep();
    }
    Assert.fail("Job didn't finish in time available");
  }

  /** Call {@link Future#cancel} before the job finishes, with interrupt enabled. */
  @Test(dataProvider = "executors")
  public void testJobCancelWithInterrupt(DependencyGraphExecutorFactory<?> factory) {
    s_logger.info("testJobCancelWithInterrupt");
    Future<?> job = executeTestJob(factory);
    assertNotNull(job);
    job.cancel(true);
    for (int i = 0; i < JOB_FINISH_TIME / SLEEP_TIME; i++) {
      if (jobFinished()) {
        assertTrue(job.isCancelled());
        assertTrue(job.isDone());
        s_logger.info("Job finished in {}", i);
        Assert.fail("Job finished normally despite cancel");
        return;
      }
      sleep();
    }
  }

  /** Call {@link Future#cancel} before the job finishes, with no interrupt. */
  @Test(dataProvider = "executors")
  public void testJobCancelWithoutInterrupt(DependencyGraphExecutorFactory<?> factory) {
    s_logger.info("testJobCancelWithoutInterrupt");
    Future<?> job = executeTestJob(factory);
    assertNotNull(job);
    job.cancel(false);
    for (int i = 0; i < JOB_FINISH_TIME / SLEEP_TIME; i++) {
      if (jobFinished()) {
        assertTrue(job.isCancelled());
        assertTrue(job.isDone());
        s_logger.info("Job finished in {}", i);
        Assert.fail("Job finished normally despite cancel");
        return;
      }
      sleep();
    }
  }
}
  public void testGetSecurity_byUniqueId_a() throws Exception {
    final UniqueId uidA = UniqueId.of("Test", "A");
    final Security secA = Mockito.mock(Security.class);
    final UniqueId uidB = UniqueId.of("Test", "B");
    final Security secB = Mockito.mock(Security.class);
    final UniqueId uidC = UniqueId.of("Test", "C");
    final Security secC = Mockito.mock(Security.class);
    final CyclicBarrier barrier = new CyclicBarrier(4);
    final MockSecuritySource underlying =
        new MockSecuritySource() {

          int _state;

          @Override
          public Security get(final UniqueId uid) {
            assertEquals(_state++, 0);
            join(barrier); // 1
            assertEquals(uid, uidA);
            // Pause for a bit to make sure that the other threads get blocked in their
            // getSecurity methods
            sleep();
            return secA;
          }

          @Override
          public Map<UniqueId, Security> get(final Collection<UniqueId> uids) {
            assertEquals(_state++, 1);
            assertEquals(uids.size(), 2);
            assertTrue(uids.contains(uidB));
            assertTrue(uids.contains(uidC));
            final Map<UniqueId, Security> result = Maps.newHashMapWithExpectedSize(2);
            result.put(uidB, secB);
            result.put(uidC, secC);
            return result;
          }
        };
    final CoalescingSecuritySource coalescing = new CoalescingSecuritySource(underlying);
    // Start three threads. One will do the first write, the other two will be blocked. Then one
    // of the other two will do a second write that includes the value required by the third.
    // The third will do no I/O itself.
    final ExecutorService exec = Executors.newCachedThreadPool();
    try {
      final Future<?> a =
          exec.submit(
              new Runnable() {
                @Override
                public void run() {
                  final Security s = coalescing.get(uidA);
                  assertSame(s, secA);
                }
              });
      final Future<?> b =
          exec.submit(
              new Runnable() {
                @Override
                public void run() {
                  join(barrier);
                  final Security s = coalescing.get(uidB);
                  assertSame(s, secB);
                }
              });
      final Future<?> c =
          exec.submit(
              new Runnable() {
                @Override
                public void run() {
                  join(barrier);
                  final Security s = coalescing.get(uidC);
                  assertSame(s, secC);
                }
              });
      join(barrier);
      a.get(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
      b.get(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
      c.get(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
    } finally {
      exec.shutdownNow();
    }
  }
  public void testGetSecurities_byUniqueId_b() throws Exception {
    final UniqueId uidA = UniqueId.of("Test", "A");
    final Security secA = Mockito.mock(Security.class);
    final UniqueId uidB = UniqueId.of("Test", "B");
    final Security secB = Mockito.mock(Security.class);
    final UniqueId uidC = UniqueId.of("Test", "C");
    final Security secC = Mockito.mock(Security.class);
    final CyclicBarrier barrier1 = new CyclicBarrier(3);
    final CyclicBarrier barrier2 = new CyclicBarrier(2);
    final MockSecuritySource underlying =
        new MockSecuritySource() {

          int _state;

          @Override
          public Map<UniqueId, Security> get(final Collection<UniqueId> uids) {
            final Map<UniqueId, Security> result = Maps.newHashMapWithExpectedSize(uids.size());
            if (++_state == 1) {
              assertEquals(uids.size(), 2);
              join(barrier1);
              assertTrue(uids.contains(uidA));
              assertTrue(uids.contains(uidB));
              result.put(uidA, secA);
              result.put(uidB, secB);
              // Pause for a bit to make sure that the other threads get blocked in their
              // getSecurity methods
              sleep();
            } else if (_state == 2) {
              assertEquals(uids.size(), 3);
              assertTrue(uids.contains(uidA));
              assertTrue(uids.contains(uidB));
              assertTrue(uids.contains(uidC));
              result.put(uidA, secA);
              result.put(uidB, secB);
              result.put(uidC, secC);
            } else {
              fail();
            }
            return result;
          }
        };
    final CoalescingSecuritySource coalescing =
        new CoalescingSecuritySource(underlying) {
          @Override
          protected void releaseOtherWritingThreads() {
            join(barrier2); // 1 + 2 // release the third thread
          }
        };
    // Start two threads. One will do the first write, the other will be blocked. Suppressing
    // releaseOtherWritingThreads means a third call will try to write its own value plus those
    // from the other threads. The second thread will do no I/O itself.
    final ExecutorService exec = Executors.newCachedThreadPool();
    try {
      final Future<?> a =
          exec.submit(
              new Runnable() {
                @Override
                public void run() {
                  final Map<UniqueId, Security> result = coalescing.get(Arrays.asList(uidA, uidB));
                  assertEquals(result.size(), 2);
                  assertSame(result.get(uidA), secA);
                  assertSame(result.get(uidB), secB);
                }
              });
      final Future<?> b =
          exec.submit(
              new Runnable() {
                @Override
                public void run() {
                  join(barrier1);
                  final Map<UniqueId, Security> result = coalescing.get(Arrays.asList(uidA, uidC));
                  assertEquals(result.size(), 2);
                  assertSame(result.get(uidA), secA);
                  assertSame(result.get(uidC), secC);
                }
              });
      final Future<?> c =
          exec.submit(
              new Runnable() {
                @Override
                public void run() {
                  join(barrier2); // 1
                  final Map<UniqueId, Security> result = coalescing.get(Arrays.asList(uidB, uidC));
                  assertEquals(result.size(), 2);
                  assertSame(result.get(uidB), secB);
                  assertSame(result.get(uidC), secC);
                }
              });
      join(barrier1);
      a.get(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
      b.get(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
      join(barrier2); // 1 + 2
      c.get(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
    } finally {
      exec.shutdownNow();
    }
  }
/** Test. */
@Test(groups = TestGroup.INTEGRATION)
public class JmsByteArrayTransportTest {

  private static final Logger s_logger = LoggerFactory.getLogger(JmsByteArrayTransportTest.class);

  private static final long TIMEOUT = 10L * Timeout.standardTimeoutMillis();

  @Test(invocationCount = 5, successPercentage = 19)
  public void topicConduit() throws Exception {
    String topicName =
        "JmsByteArrayTransportTest-topicConduit-"
            + System.getProperty("user.name")
            + "-"
            + System.currentTimeMillis();
    ConnectionFactory cf = ActiveMQTestUtils.createTestConnectionFactory();
    JmsTemplate jmsTemplate = new JmsTemplate();
    jmsTemplate.setConnectionFactory(cf);
    jmsTemplate.setPubSubDomain(true);

    JmsByteArrayMessageSender messageSender = new JmsByteArrayMessageSender(topicName, jmsTemplate);
    CollectingByteArrayMessageReceiver collectingReceiver =
        new CollectingByteArrayMessageReceiver();
    JmsByteArrayMessageDispatcher messageDispatcher =
        new JmsByteArrayMessageDispatcher(collectingReceiver);

    DefaultMessageListenerContainer container = new DefaultMessageListenerContainer();
    container.setConnectionFactory(cf);
    container.setMessageListener(messageDispatcher);
    container.setDestinationName(topicName);
    container.setPubSubDomain(true);
    container.afterPropertiesSet();
    container.start();

    Random random = new Random();
    byte[] randomBytes = new byte[1024];
    random.nextBytes(randomBytes);

    while (!container.isRunning()) {
      Thread.sleep(10L);
    }
    // TODO: this is a hack - the container doesn't always seem to have set up the consumer
    // completely yet, so wait a little longer before sending
    Thread.sleep(500L);

    messageSender.send(randomBytes);
    long startTime = System.currentTimeMillis();
    while (collectingReceiver.getMessages().isEmpty()) {
      Thread.sleep(10L);
      if ((System.currentTimeMillis() - startTime) > TIMEOUT) {
        fail("Did not receive a message in " + (TIMEOUT / 1000) + " seconds.");
      }
    }
    s_logger.debug(
        "topicConduit message received {}ms before timeout limit",
        TIMEOUT - (System.currentTimeMillis() - startTime));
    assertEquals(1, collectingReceiver.getMessages().size());
    byte[] receivedBytes = collectingReceiver.getMessages().get(0);
    assertEquals(randomBytes.length, receivedBytes.length);
    for (int i = 0; i < randomBytes.length; i++) {
      assertEquals(randomBytes[i], receivedBytes[i]);
    }

    container.stop();
    container.destroy();
  }

  @Test(invocationCount = 5, successPercentage = 19)
  public void requestConduit() throws Exception {
    String topicName =
        "JmsByteArrayTransportTest-requestConduit-"
            + System.getProperty("user.name")
            + "-"
            + System.currentTimeMillis();
    ConnectionFactory cf = ActiveMQTestUtils.createTestConnectionFactory();
    JmsTemplate jmsTemplate = new JmsTemplate();
    jmsTemplate.setConnectionFactory(cf);
    jmsTemplate.setPubSubDomain(true);
    jmsTemplate.setReceiveTimeout(5000L);

    final Random random = new Random();
    final byte[] responseBytes = new byte[512];
    random.nextBytes(responseBytes);

    JmsByteArrayRequestSender requestSender = new JmsByteArrayRequestSender(topicName, jmsTemplate);
    JmsByteArrayRequestDispatcher requestDispatcher =
        new JmsByteArrayRequestDispatcher(
            new ByteArrayRequestReceiver() {
              @Override
              public byte[] requestReceived(byte[] message) {
                return responseBytes;
              }
            });

    DefaultMessageListenerContainer container = new DefaultMessageListenerContainer();
    container.setConnectionFactory(cf);
    container.setMessageListener(requestDispatcher);
    container.setDestinationName(topicName);
    container.setPubSubDomain(true);
    container.afterPropertiesSet();
    container.start();

    byte[] randomBytes = new byte[1024];
    random.nextBytes(randomBytes);

    while (!container.isRunning()) {
      Thread.sleep(10L);
    }

    CollectingByteArrayMessageReceiver collectingReceiver =
        new CollectingByteArrayMessageReceiver();
    requestSender.sendRequest(randomBytes, collectingReceiver);
    long startTime = System.currentTimeMillis();
    while (collectingReceiver.getMessages().isEmpty()) {
      Thread.sleep(10L);
      if ((System.currentTimeMillis() - startTime) > TIMEOUT) {
        fail("Did not receive a response in " + (TIMEOUT / 1000) + " seconds.");
      }
    }
    s_logger.debug(
        "requestConduit message received {}ms before timeout limit",
        TIMEOUT - (System.currentTimeMillis() - startTime));
    assertEquals(1, collectingReceiver.getMessages().size());
    byte[] receivedBytes = collectingReceiver.getMessages().get(0);
    assertEquals(responseBytes.length, receivedBytes.length);
    for (int i = 0; i < responseBytes.length; i++) {
      assertEquals(responseBytes[i], receivedBytes[i]);
    }

    container.stop();
    container.destroy();
  }
}
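Both tests above busy-wait on collectingReceiver.getMessages() in 10ms steps until TIMEOUT expires. A latch-based receiver could wait for the first message without polling; the sketch below assumes ByteArrayMessageReceiver declares a messageReceived(byte[]) callback, as the dispatcher usage above suggests, and everything else is plain java.util.concurrent.
// Sketch of an alternative to polling CollectingByteArrayMessageReceiver: the latch is released
// when the first message arrives. Requires java.util.concurrent.CountDownLatch and TimeUnit.
class LatchingByteArrayMessageReceiver implements ByteArrayMessageReceiver {

  private final CountDownLatch _received = new CountDownLatch(1);
  private volatile byte[] _message;

  @Override
  public void messageReceived(byte[] message) {
    _message = message;
    _received.countDown();
  }

  // Blocks until the first message arrives or the timeout elapses.
  public byte[] awaitMessage(long timeoutMillis) throws InterruptedException {
    if (!_received.await(timeoutMillis, TimeUnit.MILLISECONDS)) {
      throw new IllegalStateException("No message received within " + timeoutMillis + "ms");
    }
    return _message;
  }
}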