/** Gets the events for the given program and orgUnit that fall between minDate and maxDate. */
  public static List<Event> getEvents(String program, String orgUnit, Date minDate, Date maxDate) {
    try {
      Response response;

      String dhisUrl = PreferencesState.getInstance().getDhisURL();
      String startDate = EventExtended.format(minDate, EventExtended.AMERICAN_DATE_FORMAT);
      // Extend the upper bound by eight days; the long literal makes the
      // millisecond arithmetic safe against int overflow.
      String endDate =
          EventExtended.format(
              new Date(maxDate.getTime() + (8L * 24 * 60 * 60 * 1000)),
              EventExtended.AMERICAN_DATE_FORMAT);
      String url =
          String.format(dhisUrl + DHIS_CHECK_EVENT_API, program, orgUnit, startDate, endDate);
      Log.d(TAG, url);
      url = ServerAPIController.encodeBlanks(url);

      response = ServerAPIController.executeCall(null, url, "GET");
      if (!response.isSuccessful()) {
        Log.e(TAG, "pushData (" + response.code() + "): " + response.body().string());
        throw new IOException(response.message());
      }
      // Parse the response body straight into a JsonNode; no intermediate
      // JSONObject or extra convertValue pass is required.
      JsonNode jsonNode = new ObjectMapper().readTree(response.body().string());

      return EventsWrapper.getEvents(jsonNode);

    } catch (Exception ex) {
      Log.e(TAG, "getEvents failed", ex);
      return null;
    }
  }
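
A note on the parsing step above: when the input is already a tree, convertValue with a JsonNode target is effectively an identity, which is why a direct readTree call suffices. A minimal standalone sketch (plain Jackson, no DHIS2 or Android dependencies; the sample payload is made up):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class TreeParseSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    String body = "{\"events\":[{\"event\":\"abc123\"}]}";

    // readTree already yields a JsonNode...
    JsonNode direct = mapper.readTree(body);
    // ...so converting that tree to JsonNode again is a no-op.
    JsonNode roundTripped = mapper.convertValue(direct, JsonNode.class);

    System.out.println(direct.equals(roundTripped)); // true
  }
}
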
  @Test
  public void serialization() {
    String cron = "0/5 * * * * ?";
    CronTrigger trigger =
        TriggerBuilder.newTrigger()
            .forJob("testJob", "testGroup")
            .withIdentity("testTrigger", "testTriggerGroup")
            .withSchedule(CronScheduleBuilder.cronSchedule(cron))
            .usingJobData("timeout", 5)
            .withDescription("A description!")
            .build();

    Map<String, String> triggerMap =
        mapper.convertValue(trigger, new TypeReference<HashMap<String, String>>() {});

    assertThat(triggerMap, hasKey("name"));
    assertEquals("testTrigger", triggerMap.get("name"));
    assertThat(triggerMap, hasKey("group"));
    assertEquals("testTriggerGroup", triggerMap.get("group"));
    assertThat(triggerMap, hasKey("jobName"));
    assertEquals("testJob", triggerMap.get("jobName"));

    CronTriggerImpl cronTrigger = mapper.convertValue(triggerMap, CronTriggerImpl.class);

    assertEquals(trigger.getKey().getName(), cronTrigger.getKey().getName());
    assertEquals(trigger.getKey().getGroup(), cronTrigger.getKey().getGroup());
    assertEquals(trigger.getStartTime(), cronTrigger.getStartTime());
    assertEquals(trigger.getCronExpression(), cronTrigger.getCronExpression());
    assertEquals(trigger.getTimeZone(), cronTrigger.getTimeZone());
  }
  /** Override this to read the entity from a non-standard location on the document. */
  protected ActivityObject getEntityToExtend(ObjectNode rootDocument) {

    if (this.configuration.getEntity().equals(HttpProcessorConfiguration.Entity.ACTIVITY))
      return mapper.convertValue(rootDocument, ActivityObject.class);
    else
      return mapper.convertValue(
          rootDocument.get(this.configuration.getEntity().toString()), ActivityObject.class);
  }
  /** Override this to place the result in a non-standard location on the document. */
  protected ObjectNode setEntityToExtend(ObjectNode rootDocument, ActivityObject activityObject) {

    if (this.configuration.getEntity().equals(HttpProcessorConfiguration.Entity.ACTIVITY))
      return mapper.convertValue(activityObject, ObjectNode.class);
    else
      rootDocument.set(
          this.configuration.getEntity().toString(),
          mapper.convertValue(activityObject, ObjectNode.class));

    return rootDocument;
  }
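
These two hooks lean on convertValue mapping in both directions between a POJO and an ObjectNode. A self-contained sketch of that round trip, with a hypothetical Activity bean standing in for ActivityObject:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class EntityRoundTripSketch {
  // Hypothetical stand-in for ActivityObject.
  public static class Activity {
    public String id;
    public String verb;
  }

  public static void main(String[] args) {
    ObjectMapper mapper = new ObjectMapper();

    Activity activity = new Activity();
    activity.id = "act-1";
    activity.verb = "post";

    // POJO -> tree, as in setEntityToExtend...
    ObjectNode node = mapper.convertValue(activity, ObjectNode.class);
    // ...and tree -> POJO, as in getEntityToExtend.
    Activity back = mapper.convertValue(node, Activity.class);

    System.out.println(node);      // {"id":"act-1","verb":"post"}
    System.out.println(back.verb); // post
  }
}
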
  public List<BpClass> getParents(String id, String ontology, String apiKey) throws IOException {
    String url =
        BP_API_BASE
            + BP_ONTOLOGIES
            + ontology
            + "/"
            + BP_CLASSES
            + id
            + "/parents"
            + "?include=prefLabel,hasChildren,created,synonym,definition";

    HttpResponse response =
        HttpUtil.makeHttpRequest(
            Request.Get(url)
                .addHeader("Authorization", Util.getBioPortalAuthHeader(apiKey))
                .connectTimeout(connectTimeout)
                .socketTimeout(socketTimeout));

    int statusCode = response.getStatusLine().getStatusCode();
    // Success
    if (statusCode == 200) {
      ObjectMapper mapper = new ObjectMapper();
      // EntityUtils.toString honors the entity's charset, unlike new String(bytes).
      JsonNode bpResult = mapper.readTree(EntityUtils.toString(response.getEntity()));
      // This endpoint returns parents, so name the list accordingly.
      List<BpClass> parents = new ArrayList<>();
      for (JsonNode n : bpResult) {
        parents.add(mapper.convertValue(n, BpClass.class));
      }
      return parents;
    } else {
      throw new HTTPException(statusCode);
    }
  }
  public List<BpTreeNode> getTree(String id, String ontology, String apiKey) throws IOException {
    String url = BP_API_BASE + BP_ONTOLOGIES + ontology + "/" + BP_CLASSES + id + "/tree";

    HttpResponse response =
        HttpUtil.makeHttpRequest(
            Request.Get(url)
                .addHeader("Authorization", Util.getBioPortalAuthHeader(apiKey))
                .connectTimeout(connectTimeout)
                .socketTimeout(socketTimeout));

    int statusCode = response.getStatusLine().getStatusCode();
    // The tree was successfully retrieved
    if (statusCode == 200) {
      ObjectMapper mapper = new ObjectMapper();
      JsonNode bpResult = mapper.readTree(EntityUtils.toString(response.getEntity()));
      List<BpTreeNode> nodes = new ArrayList<>();
      for (JsonNode n : bpResult) {
        nodes.add(mapper.convertValue(n, BpTreeNode.class));
      }
      return nodes;
    } else {
      throw new HTTPException(statusCode);
    }
  }
  private HadoopDruidIndexerConfig testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
      PathSpec datasourcePathSpec, Interval jobInterval) throws Exception {
    HadoopIngestionSpec spec =
        new HadoopIngestionSpec(
            new DataSchema(
                "foo",
                null,
                new AggregatorFactory[0],
                new UniformGranularitySpec(
                    Granularity.DAY, null, ImmutableList.of(new Interval("2010-01-01/P1D"))),
                jsonMapper),
            new HadoopIOConfig(jsonMapper.convertValue(datasourcePathSpec, Map.class), null, null),
            null);

    spec = jsonMapper.readValue(jsonMapper.writeValueAsString(spec), HadoopIngestionSpec.class);

    UsedSegmentLister segmentLister = EasyMock.createMock(UsedSegmentLister.class);
    EasyMock.expect(
            segmentLister.getUsedSegmentsForIntervals(
                testDatasource, Lists.newArrayList(jobInterval)))
        .andReturn(ImmutableList.of(SEGMENT));
    EasyMock.replay(segmentLister);

    spec =
        HadoopIngestionSpec.updateSegmentListIfDatasourcePathSpecIsUsed(
            spec, jsonMapper, segmentLister);
    return HadoopDruidIndexerConfig.fromString(jsonMapper.writeValueAsString(spec));
  }
Example #8
 public Pair<Integer, JsonNode> getErrorResponse() {
   Map<String, List<String>> errors =
       Collections.singletonMap(
           "errors", Collections.singletonList(getMessage() == null ? toString() : getMessage()));
   JsonNode responseBody = OBJECT_MAPPER.convertValue(errors, JsonNode.class);
   return Pair.of(getStatus(), responseBody);
 }
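
For reference, the singleton error map converts to a tree of the shape printed below; a minimal sketch with a made-up error message:

import java.util.Collections;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ErrorBodySketch {
  public static void main(String[] args) {
    ObjectMapper mapper = new ObjectMapper();
    Map<String, List<String>> errors =
        Collections.singletonMap("errors", Collections.singletonList("Unknown entity: book"));

    JsonNode responseBody = mapper.convertValue(errors, JsonNode.class);
    System.out.println(responseBody); // {"errors":["Unknown entity: book"]}
  }
}
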
  public BpClass find(String id, String ontology, String apiKey) throws HTTPException, IOException {
    String url =
        BP_API_BASE
            + BP_ONTOLOGIES
            + ontology
            + "/"
            + BP_CLASSES
            + id
            + "?include=prefLabel,hasChildren,created,synonym,definition";

    HttpResponse response =
        HttpUtil.makeHttpRequest(
            Request.Get(url)
                .addHeader("Authorization", Util.getBioPortalAuthHeader(apiKey))
                .connectTimeout(connectTimeout)
                .socketTimeout(socketTimeout));

    int statusCode = response.getStatusLine().getStatusCode();
    // The class was successfully retrieved
    if (statusCode == 200) {
      ObjectMapper mapper = new ObjectMapper();
      JsonNode bpResult = mapper.readTree(EntityUtils.toString(response.getEntity()));
      return mapper.convertValue(bpResult, BpClass.class);
    } else {
      throw new HTTPException(statusCode);
    }
  }
  protected Object[] parseIn(Expression<?> path, JsonNode fieldValue)
      throws QueryInfoException {
    Class<?> javaType = path.getJavaType();

    TypeFactory typeFactory = objectMapper.getTypeFactory();
    ArrayType arrayType = typeFactory.constructArrayType(javaType);

    return objectMapper.convertValue(fieldValue, arrayType);
  }
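
Here is the same ArrayType trick in isolation: construct the array type from an element class, then let convertValue map a JSON array onto it. The Long element type below is an assumption for the sketch; in the method above it comes from path.getJavaType():

import java.util.Arrays;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.ArrayType;

public class ParseInSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    JsonNode fieldValue = mapper.readTree("[1, 2, 3]");

    // Describe Long[] as a Jackson JavaType, then convert the array node to it.
    ArrayType arrayType = mapper.getTypeFactory().constructArrayType(Long.class);
    Object[] values = mapper.convertValue(fieldValue, arrayType);

    System.out.println(Arrays.toString(values)); // [1, 2, 3]
  }
}
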
Example #11
  /**
   * Builds the Excel sheet from the headers and contents, which must already have been set.
   *
   * @param sheetName the sheet name, or null to use a default name
   * @return this ExcelMaker, for chaining
   * @throws IllegalArgumentException if headers or contents are missing, or if a cell value is not
   *     one of the supported types: String, Integer, Long, Double, Date, or null
   */
  public ExcelMaker make(String sheetName) throws IllegalArgumentException {

    // check if headers and contents set
    if (!contextReady()) {
      throw new IllegalArgumentException("invalid headers or contents");
    }

    Sheet sheet = sheetName == null ? workbook.createSheet() : workbook.createSheet(sheetName);

    // Write the display headers as the first row; fall back to the raw headers
    // if display headers were not set.
    int rowNum = 0;
    Row headerRow = sheet.createRow(rowNum++);
    List<String> headerLabels = displayHeaders != null ? displayHeaders : headers;
    for (int cellNum = 0; cellNum < headerLabels.size(); cellNum++) {
      headerRow.createCell(cellNum).setCellValue(headerLabels.get(cellNum));
    }

    // parse each Object in content list and write as a row
    for (Object content : contents) {
      Map<String, Object> headerMap =
          objectMapper.convertValue(content, new TypeReference<HashMap<String, Object>>() {});
      Row row = sheet.createRow(rowNum++);
      // only find properties that declared by headers
      for (int cellNum = 0; cellNum < headers.size(); cellNum++) {
        String header = headers.get(cellNum);
        Cell cell = row.createCell(cellNum);
        Object obj = headerMap.get(header);
        if (obj == null) {
          obj = "";
        }
        if (obj instanceof String) {
          cell.setCellValue((String) obj);
        } else if (obj instanceof Integer) {
          cell.setCellValue((Integer) obj);
        } else if (obj instanceof Long) {
          cell.setCellValue((Long) obj);
        } else if (obj instanceof Double) {
          cell.setCellValue((Double) obj);
        } else if (obj instanceof Date) {
          cell.setCellValue((Date) obj);
        } else {
          throw new IllegalArgumentException("unsupported cell type");
        }
      }
    }
    return this;
  }
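
The pivotal step in make is flattening each content object into a property map with a TypeReference, so cell values can be looked up by header name. A minimal sketch with a hypothetical Person bean:

import java.util.Map;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

public class RowFlattenSketch {
  // Hypothetical content bean.
  public static class Person {
    public String name = "Ada";
    public Integer age = 36;
  }

  public static void main(String[] args) {
    ObjectMapper mapper = new ObjectMapper();
    // Same call shape as in make(): bean -> map keyed by property name.
    Map<String, Object> row =
        mapper.convertValue(new Person(), new TypeReference<Map<String, Object>>() {});
    System.out.println(row); // {name=Ada, age=36}
  }
}
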
 private static boolean apply(Object objectToSerialize, String str, ObjectMapper mapper) {
   // Compare tree forms: ObjectNode equality is structural, so formatting and
   // key order in the input string do not affect the result.
   final ObjectNode lhs = mapper.convertValue(objectToSerialize, ObjectNode.class);
   ObjectNode rhs = null;
   try {
     rhs = mapper.readValue(str, ObjectNode.class);
   } catch (IOException e) {
     LOGGER.error("Failed to read value", e);
   }
   return lhs.equals(rhs);
 }
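
Comparing trees rather than raw strings is what makes this check robust: ObjectNode equality is structural, so key order and whitespace in the input string do not matter. A small sketch:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class TreeEqualitySketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode a = mapper.readValue("{\"x\":1,\"y\":2}", ObjectNode.class);
    ObjectNode b = mapper.readValue("{ \"y\": 2, \"x\": 1 }", ObjectNode.class);

    // Different key order and spacing, same structure.
    System.out.println(a.equals(b)); // true
  }
}
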
 @DELETE
 @Path("/bulk")
 public Response bulkDelete(String data) throws CimbleException {
   try {
     ObjectMapper mapper = new ObjectMapper();
     List<E> list = (List<E>) mapper.convertValue(mapper.readValue(data, dtoType), filterType);
     return bulkDelete(list);
   } catch (Exception e) {
      throw new CimbleException("Error in bulkDelete()", e);
   }
 }
  protected <T> T parseType(JsonNode fieldValue, Class<T> javaType) throws QueryInfoException {
    T result;

    try {
      result = objectMapper.convertValue(fieldValue, javaType);
    } catch (IllegalArgumentException e) {
      String message = String.format("Could not parse [%s] as [%s]", fieldValue, javaType);
      throw new QueryInfoException(message, e);
    }

    return result;
  }
  /**
   * Simple test to verify that byte[] values can be handled properly when converting, as long as
   * there is metadata (from POJO definitions).
   */
  public void testIssue709() throws Exception {
    byte[] inputData = new byte[] {1, 2, 3};
    ObjectNode node = MAPPER.createObjectNode();
    node.put("data", inputData);
    Issue709Bean result = MAPPER.treeToValue(node, Issue709Bean.class);
    String json = MAPPER.writeValueAsString(node);
    Issue709Bean resultFromString = MAPPER.readValue(json, Issue709Bean.class);
    Issue709Bean resultFromConvert = MAPPER.convertValue(node, Issue709Bean.class);

    // all methods should work equally well:
    Assert.assertArrayEquals(inputData, resultFromString.data);
    Assert.assertArrayEquals(inputData, resultFromConvert.data);
    Assert.assertArrayEquals(inputData, result.data);
  }
  @Override
  public Supplier<Pair<Integer, JsonNode>> handleGet(StateContext state) {
    JsonApiDocument doc = new JsonApiDocument();
    RequestScope requestScope = state.getRequestScope();
    ObjectMapper mapper = requestScope.getMapper().getObjectMapper();
    Optional<MultivaluedMap<String, String>> queryParams = requestScope.getQueryParams();

    Map<String, Relationship> relationships = record.toResource().getRelationships();
    Relationship relationship = null;
    if (relationships != null) {
      relationship = relationships.get(relationshipName);
    }

    // Handle valid relationship
    if (relationship != null) {

      // Set data
      Data<Resource> data = relationship.getData();
      doc.setData(data);

      // Run include processor
      DocumentProcessor includedProcessor = new IncludedProcessor();
      includedProcessor.execute(doc, record, queryParams);

      return () -> Pair.of(HttpStatus.SC_OK, mapper.convertValue(doc, JsonNode.class));
    }

    // Handle no data for relationship
    if (relationshipType.isToMany()) {
      doc.setData(new Data<>(new ArrayList<>()));
    } else if (relationshipType.isToOne()) {
      doc.setData(new Data<>((Resource) null));
    } else {
      throw new IllegalStateException("Failed to GET a relationship");
    }
    return () -> Pair.of(HttpStatus.SC_OK, mapper.convertValue(doc, JsonNode.class));
  }
 private JsonNode createProduct(JsonNode node, String userName) throws Exception {
   ObjectMapper om = new ObjectMapper();
   ArrayNode anode = om.createArrayNode();
   if (node.isArray()) {
     for (JsonNode element : node) {
       ProductBack cbase = om.convertValue(element, ProductBack.class);
       anode.add(createProductToDb(om, cbase, userName));
     }
   } else {
     // A single object node maps directly; convertValue avoids a lossy
     // round-trip through asText().
     ProductBack cbase = om.convertValue(node, ProductBack.class);
     anode.add(createProductToDb(om, cbase, userName));
   }
   return anode;
 }
 // Return given object as a String
 // AG
 static String getStringFromObject(Object o) {
   String temp = "";
   if (o instanceof String) temp = mapper.convertValue(o, String.class);
   return temp;
 }
 public static <T> T convertValue(Object value, Class<T> type) {
   return OBJECT_MAPPER.convertValue(value, type);
 }
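
A typical call site for a thin wrapper like this is mapping loosely typed data, such as a Map deserialized from configuration, onto a POJO. The ServerConfig class here is hypothetical:

import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;

public class ConvertValueSketch {
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  // Hypothetical target type.
  public static class ServerConfig {
    public String host;
    public int port;
  }

  public static <T> T convertValue(Object value, Class<T> type) {
    return OBJECT_MAPPER.convertValue(value, type);
  }

  public static void main(String[] args) {
    Map<String, Object> raw = Map.of("host", "localhost", "port", 8080);
    ServerConfig config = convertValue(raw, ServerConfig.class);
    System.out.println(config.host + ":" + config.port); // localhost:8080
  }
}
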
Example #20
  public AppenderatorTester(final int maxRowsInMemory, final File basePersistDirectory) {
    objectMapper = new DefaultObjectMapper();
    objectMapper.registerSubtypes(LinearShardSpec.class);

    final Map<String, Object> parserMap =
        objectMapper.convertValue(
            new MapInputRowParser(
                new JSONParseSpec(
                    new TimestampSpec("ts", "auto", null), new DimensionsSpec(null, null, null))),
            Map.class);
    schema =
        new DataSchema(
            DATASOURCE,
            parserMap,
            new AggregatorFactory[] {
              new CountAggregatorFactory("count"), new LongSumAggregatorFactory("met", "met")
            },
            new UniformGranularitySpec(Granularity.MINUTE, QueryGranularities.NONE, null),
            objectMapper);

    tuningConfig =
        new RealtimeTuningConfig(
            maxRowsInMemory,
            null,
            null,
            basePersistDirectory,
            null,
            null,
            null,
            null,
            null,
            null,
            0,
            0,
            null,
            null);

    metrics = new FireDepartmentMetrics();
    queryExecutor = Execs.singleThreaded("queryExecutor(%d)");

    indexIO =
        new IndexIO(
            objectMapper,
            new ColumnConfig() {
              @Override
              public int columnCacheSizeBytes() {
                return 0;
              }
            });
    indexMerger = new IndexMerger(objectMapper, indexIO);

    emitter =
        new ServiceEmitter(
            "test",
            "test",
            new LoggingEmitter(
                new Logger(AppenderatorTester.class), LoggingEmitter.Level.INFO, objectMapper));
    emitter.start();
    EmittingLogger.registerEmitter(emitter);
    dataSegmentPusher =
        new DataSegmentPusher() {
          @Deprecated
          @Override
          public String getPathForHadoop(String dataSource) {
            return getPathForHadoop();
          }

          @Override
          public String getPathForHadoop() {
            throw new UnsupportedOperationException();
          }

          @Override
          public DataSegment push(File file, DataSegment segment) throws IOException {
            pushedSegments.add(segment);
            return segment;
          }
        };
    appenderator =
        Appenderators.createRealtime(
            schema,
            tuningConfig,
            metrics,
            dataSegmentPusher,
            objectMapper,
            indexIO,
            indexMerger,
            new DefaultQueryRunnerFactoryConglomerate(
                ImmutableMap.<Class<? extends Query>, QueryRunnerFactory>of(
                    TimeseriesQuery.class,
                    new TimeseriesQueryRunnerFactory(
                        new TimeseriesQueryQueryToolChest(
                            new IntervalChunkingQueryRunnerDecorator(
                                queryExecutor, QueryRunnerTestHelper.NOOP_QUERYWATCHER, emitter)),
                        new TimeseriesQueryEngine(),
                        QueryRunnerTestHelper.NOOP_QUERYWATCHER))),
            new DataSegmentAnnouncer() {
              @Override
              public void announceSegment(DataSegment segment) throws IOException {}

              @Override
              public void unannounceSegment(DataSegment segment) throws IOException {}

              @Override
              public void announceSegments(Iterable<DataSegment> segments) throws IOException {}

              @Override
              public void unannounceSegments(Iterable<DataSegment> segments) throws IOException {}

              @Override
              public boolean isAnnounced(DataSegment segment) {
                return false;
              }
            },
            emitter,
            queryExecutor,
            MapCache.create(2048),
            new CacheConfig());
  }
  protected Response createExecutionVariable(
      Execution execution,
      boolean override,
      int variableType,
      HttpServletRequest httpServletRequest,
      UriInfo uriInfo) {

    Object result = null;
    Response.ResponseBuilder responseBuilder = Response.ok();

    List<RestVariable> inputVariables = new ArrayList<>();
    List<RestVariable> resultVariables = new ArrayList<>();

    if (Utils.isApplicationJsonRequest(httpServletRequest)) {
      try {
        ObjectMapper objectMapper = new ObjectMapper();
        @SuppressWarnings("unchecked")
        List<Object> variableObjects =
            (List<Object>) objectMapper.readValue(httpServletRequest.getInputStream(), List.class);
        for (Object restObject : variableObjects) {
          RestVariable restVariable = objectMapper.convertValue(restObject, RestVariable.class);
          inputVariables.add(restVariable);
        }
      } catch (Exception e) {
        throw new ActivitiIllegalArgumentException(
            "Failed to deserialize request body to RestVariable instances", e);
      }
    } else if (Utils.isApplicationXmlRequest(httpServletRequest)) {
      JAXBContext jaxbContext = null;
      try {
        jaxbContext = JAXBContext.newInstance(RestVariableCollection.class);
        Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
        RestVariableCollection restVariableCollection =
            (RestVariableCollection)
                jaxbUnmarshaller.unmarshal(httpServletRequest.getInputStream());
        if (restVariableCollection == null) {
          throw new ActivitiIllegalArgumentException(
              "xml request body could not be transformed to a RestVariableCollection instance.");
        }
        List<RestVariable> restVariableList = restVariableCollection.getRestVariables();

        if (restVariableList.isEmpty()) {
          throw new ActivitiIllegalArgumentException(
              "xml request body did not identify any rest variables to be updated");
        }
        inputVariables.addAll(restVariableList);

      } catch (JAXBException | IOException e) {
        throw new ActivitiIllegalArgumentException(
            "xml request body could not be transformed to a RestVariable instance.", e);
      }
    }

    if (inputVariables.isEmpty()) {
      throw new ActivitiIllegalArgumentException(
          "Request didn't contain a list of variables to create.");
    }

    RestVariable.RestVariableScope sharedScope = null;
    RestVariable.RestVariableScope varScope = null;
    Map<String, Object> variablesToSet = new HashMap<>();
    RestResponseFactory restResponseFactory = new RestResponseFactory();

    for (RestVariable var : inputVariables) {
      // Validate if scopes match
      varScope = var.getVariableScope();
      if (var.getName() == null) {
        throw new ActivitiIllegalArgumentException("Variable name is required");
      }

      if (varScope == null) {
        varScope = RestVariable.RestVariableScope.LOCAL;
      }
      if (sharedScope == null) {
        sharedScope = varScope;
      }
      if (varScope != sharedScope) {
        throw new ActivitiIllegalArgumentException(
            "Only allowed to update multiple variables in the same scope.");
      }

      if (!override && hasVariableOnScope(execution, var.getName(), varScope)) {
        throw new BPMNConflictException(
            "Variable '"
                + var.getName()
                + "' is already present on execution '"
                + execution.getId()
                + "'.");
      }

      Object actualVariableValue = restResponseFactory.getVariableValue(var);
      variablesToSet.put(var.getName(), actualVariableValue);
      resultVariables.add(
          restResponseFactory
              .createRestVariable(
                  var.getName(),
                  actualVariableValue,
                  varScope,
                  execution.getId(),
                  variableType,
                  false,
                  uriInfo.getBaseUri().toString()));
    }

    if (!variablesToSet.isEmpty()) {
      RuntimeService runtimeService = BPMNOSGIService.getRumtimeService();
      if (sharedScope == RestVariable.RestVariableScope.LOCAL) {
        runtimeService.setVariablesLocal(execution.getId(), variablesToSet);
      } else {
        if (execution.getParentId() != null) {
          // Set explicitly on the parent; setting non-local variables on the execution
          // itself would override local variables if they exist.
          runtimeService.setVariables(execution.getParentId(), variablesToSet);
        } else {
          // Standalone task, no global variables possible
          throw new ActivitiIllegalArgumentException(
              "Cannot set global variables on execution '"
                  + execution.getId()
                  + "', task is not part of process.");
        }
      }
    }

    RestVariableCollection restVariableCollection = new RestVariableCollection();
    restVariableCollection.setRestVariables(resultVariables);
    responseBuilder.entity(restVariableCollection);
    return responseBuilder.status(Response.Status.CREATED).build();
  }
 // Return given object as a double
 // AG
 static double getDoubleFromObject(Object o) {
   double temp = 0.0;
   if (o instanceof Double) temp = mapper.convertValue(o, Double.class);
   return temp;
 }
 // Return given object as a list
 // AG
 static List<?> getListFromObject(Object o) {
   @SuppressWarnings("rawtypes")
   List<?> temp = new LinkedList();
   if (o instanceof List<?>) temp = mapper.convertValue(o, List.class);
   return temp;
 }
Example #24
 private Pair<Integer, JsonNode> buildResponse(Map<String, List<String>> errors) {
   JsonNode responseBody = OBJECT_MAPPER.convertValue(errors, JsonNode.class);
   return Pair.of(getStatus(), responseBody);
 }
  @Before
  public void setUp() throws Exception {
    final File tmpDir = Files.createTempDir();
    tmpDir.deleteOnExit();

    ObjectMapper jsonMapper = new DefaultObjectMapper();

    schema =
        new DataSchema(
            "test",
            jsonMapper.convertValue(
                new StringInputRowParser(
                    new JSONParseSpec(
                        new TimestampSpec("timestamp", "auto", null),
                        new DimensionsSpec(null, null, null))),
                Map.class),
            new AggregatorFactory[] {new CountAggregatorFactory("rows")},
            new UniformGranularitySpec(Granularity.HOUR, QueryGranularity.NONE, null),
            jsonMapper);

    announcer = EasyMock.createMock(DataSegmentAnnouncer.class);
    announcer.announceSegment(EasyMock.<DataSegment>anyObject());
    EasyMock.expectLastCall().anyTimes();

    segmentPublisher = EasyMock.createNiceMock(SegmentPublisher.class);
    dataSegmentPusher = EasyMock.createNiceMock(DataSegmentPusher.class);
    serverView = EasyMock.createMock(FilteredServerView.class);
    serverView.registerSegmentCallback(
        EasyMock.<Executor>anyObject(),
        EasyMock.<ServerView.SegmentCallback>anyObject(),
        EasyMock.<Predicate<DataSegment>>anyObject());
    EasyMock.expectLastCall().anyTimes();

    emitter = EasyMock.createMock(ServiceEmitter.class);

    EasyMock.replay(announcer, segmentPublisher, dataSegmentPusher, serverView, emitter);

    tuningConfig =
        new RealtimeTuningConfig(
            1,
            null,
            null,
            null,
            new IntervalStartVersioningPolicy(),
            rejectionPolicy,
            null,
            null,
            null,
            null,
            null,
            null,
            null);

    realtimePlumberSchool =
        new RealtimePlumberSchool(
            emitter,
            new DefaultQueryRunnerFactoryConglomerate(
                Maps.<Class<? extends Query>, QueryRunnerFactory>newHashMap()),
            dataSegmentPusher,
            announcer,
            segmentPublisher,
            serverView,
            MoreExecutors.sameThreadExecutor());

    metrics = new FireDepartmentMetrics();
    plumber = (RealtimePlumber) realtimePlumberSchool.findPlumber(schema, tuningConfig, metrics);
  }
 // Return given object as an int
 // AG
 static int getIntFromObject(Object o) {
   int temp = 0;
   if (o instanceof Integer) temp = mapper.convertValue(o, Integer.class);
   return temp;
 }
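
One reason helpers like these route through convertValue rather than a plain cast: when the instanceof guard is relaxed, convertValue can coerce compatible representations, since it serializes the value and re-deserializes it as the target type. A sketch of both cases:

import com.fasterxml.jackson.databind.ObjectMapper;

public class CoercionSketch {
  public static void main(String[] args) {
    ObjectMapper mapper = new ObjectMapper();

    // With a matching runtime type, convertValue is a pass-through.
    int same = mapper.convertValue(42, Integer.class);
    // With a coercible source, it converts where a cast would fail.
    int coerced = mapper.convertValue("123", Integer.class);

    System.out.println(same + " " + coerced); // 42 123
  }
}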