Example 1
  void compute() {
    // lazy initialization: nothing to do once the likelihood has been evaluated
    if (ll != null) {
      return;
    }
    // run the diffuse Kalman filter over the state space data
    Filter filter = new Filter();
    filter.setSsf(ssf);
    DiffuseFilteringResults dfr = new DiffuseFilteringResults();
    filter.process(ssfdata, dfr);
    // evaluate the diffuse concentrated likelihood from the filtering results
    DiffuseConcentratedLikelihood dll = new DiffuseConcentratedLikelihood();
    LikelihoodEvaluation.evaluate(dfr, dll);
    ll = new DefaultLikelihoodEvaluation<>(dll);
  }
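A minimal companion sketch, not from the source: because of the null guard, compute() can safely be called from any accessor, and the filter only ever runs once. The accessor name and the field's declared type are assumptions made for illustration.

  DefaultLikelihoodEvaluation<DiffuseConcentratedLikelihood> likelihood() {
    compute(); // no-op once ll has been evaluated
    return ll;
  }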
Example 2
  /*
   * def test_truncatewords
   *   assert_equal 'one two three', @filters.truncatewords('one two three', 4)
   *   assert_equal 'one two...', @filters.truncatewords('one two three', 2)
   *   assert_equal 'one two three', @filters.truncatewords('one two three')
   *   assert_equal 'Two small (13&#8221; x 5.5&#8221; x 10&#8221; high) baskets fit inside one large basket (13&#8221;...',
   *                 @filters.truncatewords('Two small (13&#8221; x 5.5&#8221; x 10&#8221; high) baskets fit inside one large basket (13&#8221; x 16&#8221; x 10.5&#8221; high) with cover.', 15)
   * end
   */
  @Test
  public void applyOriginalTest() {

    final Filter filter = Filter.getFilter("truncatewords");

    assertThat(filter.apply("one two three", 4), is((Object) "one two three"));
    assertThat(filter.apply("one two three", 2), is((Object) "one two..."));
    assertThat(filter.apply("one two three", 3), is((Object) "one two three"));
    assertThat(
        filter.apply(
            "Two small (13&#8221; x 5.5&#8221; x 10&#8221; high) baskets fit inside one large basket (13&#8221; x 16&#8221; x 10.5&#8221; high) with cover.",
            15),
        is(
            (Object)
                "Two small (13&#8221; x 5.5&#8221; x 10&#8221; high) baskets fit inside one large basket (13&#8221;..."));
  }
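In practice the same filter is usually exercised through template rendering rather than by calling apply directly. A minimal sketch, assuming the liqp Template API (Template.parse / render); the variable name "words" is made up for illustration:

  @Test
  public void renderThroughTemplateTest() {
    // hedged sketch, not part of the original test
    Template template = Template.parse("{{ words | truncatewords: 2 }}");
    String rendered = template.render("{ \"words\" : \"one two three\" }");
    assertThat(rendered, is("one two..."));
  }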
Example 3
  @Test
  public void mustBeAbleToFilterAFuture() throws Throwable {
    final CountDownLatch latch = new CountDownLatch(1);
    Promise<String> cf = Futures.promise();
    Future<String> f = cf.future();
    Future<String> r =
        f.filter(
            Filter.filterOf(
                new Function<String, Boolean>() {
                  public Boolean apply(String r) {
                    latch.countDown();
                    return r.equals("foo");
                  }
                }),
            system.dispatcher());

    cf.success("foo");
    assertTrue(latch.await(5000, TimeUnit.MILLISECONDS));
    assertEquals(Await.result(f, timeout), "foo");
    assertEquals(Await.result(r, timeout), "foo");
  }
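For contrast, a minimal sketch (not taken from the source) of the complementary case: when the predicate rejects the completed value, the filtered Future fails instead of producing a result. Scala futures report an unsatisfied filter predicate as a NoSuchElementException:

  @Test
  public void mustFailToFilterANonMatchingFuture() throws Throwable {
    Promise<String> cf = Futures.promise();
    Future<String> f = cf.future();
    Future<String> r =
        f.filter(
            Filter.filterOf(
                new Function<String, Boolean>() {
                  public Boolean apply(String s) {
                    return s.equals("foo");
                  }
                }),
            system.dispatcher());

    cf.success("bar"); // does not satisfy the predicate
    assertEquals(Await.result(f, timeout), "bar");
    try {
      Await.result(r, timeout);
      fail("expected the filtered Future to fail for a non-matching value");
    } catch (java.util.NoSuchElementException expected) {
      // the unsatisfied predicate surfaces here
    }
  }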
Example 4
  @Test
  public void testSODA() {
    // should say fieldsTypes, maybe with object/component prefix
    Map<String, Object> eventTypes = new HashMap<>();
    eventTypes.put(LITERAL_SYMBOL, String.class);
    eventTypes.put(LITERAL_PRICE, Integer.class);

    EPStatementObjectModel model = new EPStatementObjectModel();
    model.setInsertInto(InsertIntoClause.create(LITERAL_RETURN_OBJ));
    model.setSelectClause(
        SelectClause.create().add(Expressions.avg(LITERAL_PRICE), LITERAL_AVG).add(LITERAL_PRICE));
    Filter filter = Filter.create("quotes_default", Expressions.eq(LITERAL_SYMBOL, "A"));
    model.setFromClause(
        FromClause.create(
            FilterStream.create(filter).addView("win", "length", Expressions.constant(2))));
    model.setHavingClause(
        Expressions.gt(Expressions.avg(LITERAL_PRICE), Expressions.constant(60.0)));

    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout(LITERAL_QUOTES, new RandomSentenceSpout());
    builder
        .setBolt(
            LITERAL_ESPER,
            (new EsperBolt())
                .addEventTypes(eventTypes)
                .addOutputTypes(
                    Collections.singletonMap(
                        LITERAL_RETURN_OBJ, Arrays.asList(LITERAL_AVG, LITERAL_PRICE)))
                .addObjectStatemens(Collections.singleton(model)))
        .shuffleGrouping(LITERAL_QUOTES);
    builder.setBolt("print", new PrinterBolt()).shuffleGrouping(LITERAL_ESPER, LITERAL_RETURN_OBJ);

    Config conf = new Config();
    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology("test", conf, builder.createTopology());
    Utils.sleep(10000);
    cluster.shutdown();
    assertEquals(Double.valueOf(75.0), resultSODA.get(100));
    assertEquals(Double.valueOf(75.0), resultSODA.get(50));
  }
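For reference, the SODA object model built above corresponds roughly to the following EPL text. This is a sketch, not taken from the test, and the concrete values of LITERAL_RETURN_OBJ, LITERAL_AVG and LITERAL_PRICE ("Result", "avg", "price") are assumptions:

  // rough EPL equivalent of the object model (constant values are assumed)
  String epl =
      "insert into Result "
          + "select avg(price) as avg, price "
          + "from quotes_default.win:length(2) "
          + "having avg(price) > 60.0";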
Example 5
  // Demo: time-varying trading days. Prints the variance of a smoothed component
  // with and without the contribution of the calendar regression effects.
  // Not run as part of the test suite: the @Test annotation is commented out.
  // @Test
  public void demoTD() {
    TsData s = Data.X;
    CompositeResults rslts = TramoSeatsProcessingFactory.process(s, TramoSeatsSpecification.RSA5);
    PreprocessingModel regarima = rslts.get("preprocessing", PreprocessingModel.class);
    SeatsResults seats = rslts.get("decomposition", SeatsResults.class);
    assertTrue(seats != null && regarima != null);

    if (regarima.isMultiplicative()) {
      s = s.log();
    }
    int[] calPos =
        regarima.description.getRegressionVariablePositions(ComponentType.CalendarEffect);
    UcarimaModel ucm = seats.getUcarimaModel();
    // compute the full decomposition...
    SsfUcarima stoch = new SsfUcarima(ucm);
    ExtendedSsfData xdata = new ExtendedSsfData(new SsfData(s, null));
    xdata.setForecastsCount(s.getFrequency().intValue());
    Matrix x =
        regarima
            .description
            .buildRegressionVariables()
            .all()
            .matrix(new TsDomain(s.getStart(), xdata.getCount()));
    RegSsf xssf = new RegSsf(stoch, x.subMatrix());

    // diffuse square-root filtering on the regression-extended state space form
    Filter filter = new Filter();
    filter.setInitializer(new DiffuseSquareRootInitializer());
    filter.setSsf(xssf);
    DiffuseFilteringResults fr = new DiffuseFilteringResults(true);
    fr.getVarianceFilter().setSavingP(true);
    fr.getFilteredData().setSavingA(true);
    filter.process(xdata, fr);
    Smoother smoother = new Smoother();
    smoother.setSsf(xssf);
    smoother.setCalcVar(true);
    SmoothingResults sm = new SmoothingResults();
    smoother.process(xdata, fr, sm);

    // smooth the linearized series with the stochastic model alone, for comparison
    Smoother lsmoother = new Smoother();
    lsmoother.setSsf(stoch);
    lsmoother.setCalcVar(true);
    SmoothingResults lsm = new SmoothingResults();
    ExtendedSsfData xldata =
        new ExtendedSsfData(new SsfData(regarima.linearizedSeries(false), null));
    xldata.setForecastsCount(s.getFrequency().intValue());
    lsmoother.process(xldata, lsm);

    int spos = stoch.cmpPos(1);
    DataBlock Z = new DataBlock(xssf.getStateDim());
    double[] v = new double[xdata.getCount()];
    double[] c = new double[xdata.getCount()];
    double[] svar = sm.componentVar(spos);
    double[] slvar = lsm.componentVar(spos);
    int start = regarima.description.getRegressionVariablesStartingPosition();
    for (int i = 0; i < v.length; ++i) {
      Z.set(spos, 1);
      for (int j = 0; j < calPos.length; ++j) {
        Z.set(stoch.getStateDim() + calPos[j], x.get(i, calPos[j]));
      }
      v[i] = sm.zvariance(i, Z);
      Z.set(spos, 0);
      c[i] = sm.zvariance(i, Z);
      System.out.print(svar[i]);
      System.out.print('\t');
      System.out.print(slvar[i]);
      System.out.print('\t');
      System.out.print(c[i]);
      System.out.print('\t');
      System.out.println(v[i]);
    }
    System.out.println(sm.P(50));
    System.out.println(sm.P(svar.length - 1));
    System.out.println(regarima.estimation.getLikelihood().getBVar());
  }