public List<ItemVO<Integer, Integer>> getItemsByItemType(
      Integer tenantId, Integer itemType, int count) {

    if (itemType == null) {
      throw new IllegalArgumentException("itemType must not be 'null'");
    }

    List<Object> args = Lists.newArrayList((Object) itemType);
    List<Integer> argt = Lists.newArrayList(Types.INTEGER);

    StringBuilder sqlString = new StringBuilder("SELECT ");
    sqlString.append(DEFAULT_TENANT_ID_COLUMN_NAME).append(",");
    sqlString.append(DEFAULT_ITEM_ID_COLUMN_NAME).append(",");
    sqlString.append(DEFAULT_ITEM_TYPE_ID_COLUMN_NAME);
    sqlString.append(" FROM ");
    sqlString.append(DEFAULT_TABLE_NAME);
    sqlString.append(" WHERE ");
    sqlString.append(DEFAULT_ITEM_TYPE_ID_COLUMN_NAME);
    sqlString.append("=?");
    if (tenantId != null) {
      sqlString.append(" AND ").append(DEFAULT_TENANT_ID_COLUMN_NAME).append("=?");
      args.add(tenantId);
      argt.add(Types.INTEGER);
    }
    if (count != 0) {
      sqlString.append(" LIMIT ?");
      args.add(count);
      argt.add(Types.INTEGER);
    }

    return getJdbcTemplate()
        .query(sqlString.toString(), args.toArray(), Ints.toArray(argt), itemRowMapper);
  }
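For orientation, a comment-only sketch of what this method hands to the JdbcTemplate (column and table names are shown symbolically, since the DEFAULT_* constants are defined elsewhere):

  // With a non-null tenantId and count == 10 the statement has the shape
  //   SELECT <tenant_id>,<item_id>,<item_type_id> FROM <table>
  //   WHERE <item_type_id>=? AND <tenant_id>=? LIMIT ?
  // bound as (itemType, tenantId, 10), each argument declared as java.sql.Types.INTEGER.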
  @Test
  public final void shouldNotSaveFormBecauseOfDeviationCausesValidationErrors() {
    // given
    stubProgressType(ProgressType.PLANNED);
    stubTechnologyOperation(null);

    Entity deviationCauseEntity = mockEntity(dataDefinition);
    Entity invalidDeviationCauseEntity = mockEntity(dataDefinition);
    given(invalidDeviationCauseEntity.isValid()).willReturn(false);
    given(dataDefinition.save(deviationCauseEntity)).willReturn(invalidDeviationCauseEntity);

    AwesomeDynamicListComponent deviationCausesAdl = mock(AwesomeDynamicListComponent.class);
    stubViewComponent(CORRECTION_CAUSE_TYPES_ADL_REF, deviationCausesAdl);
    stubHasManyField(
        ppsEntity,
        ProductionPerShiftFields.PLANNED_PROGRESS_CORRECTION_TYPES,
        Lists.newArrayList(deviationCauseEntity));

    // when
    progressPerShiftViewSaver.save(view);

    // then
    verify(txStatus, never()).setRollbackOnly();

    verify(dataDefinition).save(deviationCauseEntity);
    verify(dataDefinition, never()).save(ppsEntity);
    verify(deviationCausesAdl).setFieldValue(Lists.newArrayList(invalidDeviationCauseEntity));
    verify(form)
        .addMessage("qcadooView.message.saveFailedMessage", ComponentState.MessageType.FAILURE);
  }
  @Test
  public void analyseSingleArticleSourcesTest() {
    Library library = createSimpleLibrary();
    Article article = library.getArticles().get(0);
    assertThat("name of article", article.getTitle(), is(equalTo("F**k the system")));

    ISourceAnalysator analysator = createSourceAnalysator(library);
    Map<GeneralSource, List<Source>> generalSources =
        analysator.getGeneralSourcesOfArticle(article);
    assertThat("general sopurces count", generalSources.size(), is(equalTo(2)));

    List<GeneralSource> requiredSources = Lists.newArrayList(spiegel, guardian);
    for (Entry<GeneralSource, List<Source>> entry : generalSources.entrySet()) {
      GeneralSource generalSource = entry.getKey();
      boolean removedGeneralSource = requiredSources.remove(generalSource);
      assertThat("expected general source was found", removedGeneralSource, is(true));
      List<Source> sources = entry.getValue();
      if (generalSource.equals(spiegel)) {
        assertThat(
            "count of referenced articles of " + generalSource.getName(),
            sources.size(),
            is(equalTo(2)));
      } else if (generalSource.equals(guardian)) {
        assertThat(
            "count of referenced articles of " + generalSource.getName(),
            sources.size(),
            is(equalTo(1)));
      } else {
        fail("wrong general source");
      }
    }
    printSourcesOfArticles(Lists.newArrayList(article), generalSources);
  }
  static {
    ProductMatcher toothBrushProductMatcher = new SingleProductMatcher(toothBrush);
    DiscountElement toothBrushFirstTwo = new DiscountElement(noDiscount, 2);
    DiscountElement toothBrushFreeOne = new DiscountElement(freeDiscount, 1);

    toothBrushPromotionElement = new PromotionElement();
    toothBrushPromotionElement.setDiscountElements(
        Lists.newArrayList(toothBrushFirstTwo, toothBrushFreeOne));
    toothBrushPromotionElement.setMatcher(toothBrushProductMatcher);
    toothBrushPromotionElement.setTotalRequired(3);

    ProductMatcher chipsProductMatcher = new SingleProductMatcher(chips);
    DiscountElement chipsDiscount = new DiscountElement(fixed200Discount, 1);
    chipsPromotionElement = new PromotionElement();
    chipsPromotionElement.setDiscountElements(Lists.newArrayList(chipsDiscount));
    chipsPromotionElement.setMatcher(chipsProductMatcher);
    chipsPromotionElement.setTotalRequired(1);

    ProductMatcher salsaProductMatcher = new SingleProductMatcher(salsa);
    DiscountElement salsaDiscount = new DiscountElement(fixed299Discount, 1);
    salsaPromotionElement = new PromotionElement();
    salsaPromotionElement.setDiscountElements(Lists.newArrayList(salsaDiscount));
    salsaPromotionElement.setMatcher(salsaProductMatcher);
    salsaPromotionElement.setTotalRequired(1);
  }
  @Test
  public void runMultiValue() {
    Tap source =
        new MemorySourceTap(
            Arrays.asList(
                new Tuple(1, 1),
                new Tuple(1, 2),
                new Tuple(1, 1),
                new Tuple((Integer) null, (Integer) 2)),
            new Fields("key", "value"));

    Pipe pipe = new Pipe("pipe");
    pipe =
        new IncrementForFieldValues(
            pipe,
            "Group",
            "CounterA",
            new Fields("key", "value"),
            Lists.<Integer>newArrayList(1, 1));
    pipe =
        new IncrementForFieldValues(
            pipe,
            Counter.B,
            new Fields("key", "value"),
            Lists.<Object>newArrayList((Object) null, 2));

    Flow f = CascadingUtil.get().getFlowConnector().connect(source, new NullTap(), pipe);
    f.complete();

    Assert.assertEquals(2L, Counters.get(f, "Group", "CounterA").longValue());
    Assert.assertEquals(1L, Counters.get(f, Counter.B).longValue());
  }
  @Test
  public void pullFileProjectUsingFileMapping() throws Exception {
    PullOptionsImpl opts = mockServerRule.getPullOpts();
    opts.setPullType("trans");
    File pullBaseDir = tempFolder.newFolder("file-pull-test");
    opts.setSrcDir(pullBaseDir);
    opts.setTransDir(pullBaseDir);
    log.debug("pull base dir is: {}", pullBaseDir);
    // we define our own mapping rules
    opts.setFileMappingRules(
        Lists.newArrayList(
            new FileMappingRule("**/*.odt", "{extension}/{path}/{locale}/{filename}.{extension}"),
            new FileMappingRule(
                "**/*.ods", "{extension}/{locale_with_underscore}/{filename}.{extension}")));

    InputStream sourceFileStream = IOUtils.toInputStream("source content", Charsets.UTF_8);
    InputStream transFileStream = IOUtils.toInputStream("translation content", Charsets.UTF_8);
    ArrayList<ResourceMeta> remoteDocList =
        Lists.newArrayList(new ResourceMeta("test-ods.ods"), new ResourceMeta("test-odt.odt"));

    RawPullCommand pullCommand =
        mockServerRule.createRawPullCommand(remoteDocList, sourceFileStream, transFileStream);

    pullCommand.run();

    assertThat(new File(pullBaseDir, "odt/zh-CN/test-odt.odt").exists(), is(true));
    assertThat(new File(pullBaseDir, "ods/zh_CN/test-ods.ods").exists(), is(true));
  }
Example #7
 /**
  * Reads a tab-separated file and puts the contents into a map.
  *
  * <p>We give a few options:
  *
  * <ul>
  *   <li>You can set the index for the key to the map. If the key index is not zero, we only add
  *       the first column as a value to this map (so setting overwrite to true in this case
  *       doesn't make a whole lot of sense - just use readMapFromTsv instead).
  *   <li>If overwrite is true, we don't bother checking to see if the key is already in the map.
  *       This speeds up processing if you know that your file only has one line per unique key.
  *   <li>You can provide a LineFilter object that will be called with each line to determine if
  *       it should be skipped.
  * </ul>
  */
 public Map<String, List<String>> readMapListFromTsvReader(
     BufferedReader reader, int keyIndex, boolean overwrite, LineFilter filter)
     throws IOException {
   Map<String, List<String>> map = Maps.newHashMap();
   String line;
   while ((line = reader.readLine()) != null) {
     String[] fields = line.split("\t");
     if (filter != null && filter.filter(fields)) continue;
     String key = fields[keyIndex];
     List<String> list;
     if (overwrite) {
       list = Lists.newArrayList();
       map.put(key, list);
     } else {
       list = map.get(key);
       if (list == null) {
         list = Lists.newArrayList();
         map.put(key, list);
       }
     }
     if (keyIndex == 0) {
       for (int i = 1; i < fields.length; i++) {
         list.add(fields[i]);
       }
     } else {
       list.add(fields[0]);
     }
   }
   return map;
 }
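A minimal usage sketch for the reader above (hypothetical: it assumes the call happens inside the same class, that "aliases.tsv" exists, and that LineFilter is the single-method type implied by filter.filter(fields)):

 // Group lines of a "name<TAB>alias..." file by name, skipping comment lines.
 BufferedReader reader = new BufferedReader(new FileReader("aliases.tsv"));
 try {
   LineFilter skipComments =
       new LineFilter() {
         @Override
         public boolean filter(String[] fields) { // assumed signature, inferred from the code above
           return fields[0].startsWith("#");
         }
       };
   Map<String, List<String>> aliasesByName =
       readMapListFromTsvReader(reader, 0, /* overwrite */ false, skipComments);
   for (Map.Entry<String, List<String>> entry : aliasesByName.entrySet()) {
     System.out.println(entry.getKey() + " -> " + entry.getValue());
   }
 } finally {
   reader.close();
 }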
  /**
   * Builds a ResultMap object for the given entity class.
   *
   * @param id the identifier under which the ResultMap is registered
   * @param clazz the entity class whose fields are mapped
   * @param configuration the MyBatis configuration that caches the ResultMap
   * @return the cached or newly built ResultMap
   */
  private ResultMap buildResultMap(String id, Class<?> clazz, Configuration configuration) {
    // Check whether it already exists in the cache
    if (configuration.hasResultMap(id)) {
      return configuration.getResultMap(id);
    }
    List<ResultMapping> resultMappings = Lists.newArrayList();
    Map<String, Field> columns = EntityUtil.getFields(clazz);
    for (Map.Entry<String, Field> column : columns.entrySet()) {
      Field field = column.getValue();
      String fieldName = field.getName();
      Class<?> columnTypeClass = resolveResultJavaType(clazz, fieldName, null);
      List<ResultFlag> flags = Lists.newArrayList();
      if (field.isAnnotationPresent(Id.class)) {
        flags.add(ResultFlag.ID);
      }
      String columnName = column.getKey();
      resultMappings.add(
          buildResultMapping(configuration, fieldName, columnName, columnTypeClass, flags));
    }

    // Build the ResultMap
    ResultMap.Builder resultMapBuilder =
        new ResultMap.Builder(configuration, id, clazz, resultMappings);
    ResultMap rm = resultMapBuilder.build();
    // Put it into the cache
    configuration.addResultMap(rm);
    return rm;
  }
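For illustration only, a hypothetical entity this builder could be asked to map (it assumes EntityUtil.getFields keys its map by a column name derived from each field, and that Id.class is the persistence @Id annotation checked above):

  // Hypothetical entity: "id" carries @Id, so its ResultMapping is flagged with ResultFlag.ID;
  // "userName" becomes an ordinary mapping for whatever column name EntityUtil.getFields derives.
  public class User {
    @Id private Long id;
    private String userName;
    // getters and setters omitted
  }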
  static Map<String, WorkflowMappingBean> asMappingBeans(WorkflowScheme scheme) {
    if (scheme.getMappings().isEmpty()) {
      return ImmutableMap.of();
    }

    Map<String, WorkflowMappingBean> mappings = Maps.newHashMap();
    for (Map.Entry<String, String> mapping : scheme.getMappings().entrySet()) {
      final String issueType = mapping.getKey();
      final String workflowName = mapping.getValue();
      WorkflowMappingBean mappingBean = mappings.get(workflowName);
      if (mappingBean == null) {
        mappingBean = new WorkflowMappingBean(workflowName, Lists.<String>newArrayList());
        mappingBean.setDefaultMapping(false);
        mappings.put(workflowName, mappingBean);
      }
      if (issueType != null) {
        mappingBean.addIssueType(issueType);
      } else {
        mappingBean.setDefaultMapping(true);
      }
    }
    if (scheme.getConfiguredDefaultWorkflow() == null) {
      WorkflowMappingBean bean = mappings.get(JiraWorkflow.DEFAULT_WORKFLOW_NAME);
      if (bean == null) {
        bean =
            new WorkflowMappingBean(
                JiraWorkflow.DEFAULT_WORKFLOW_NAME, Lists.<String>newArrayList());
        bean.setIssueTypes(Collections.<String>emptyList());
        mappings.put(JiraWorkflow.DEFAULT_WORKFLOW_NAME, bean);
      }
      bean.setDefaultMapping(true);
    }
    return mappings;
  }
Example #10
  private List<DependentModule> createDependenciesInOrder(
      boolean includeSourceFolder,
      LinkedHashSet<DependentModule> dependencies,
      DependentModule jdkDependency) {
    List<DependentModule> dependenciesInOrder = Lists.newArrayList();

    // If the source folder module is present, add it to the front of the list.
    if (includeSourceFolder) {
      dependenciesInOrder.add(DependentModule.newSourceFolder());
    }

    // List the libraries before the non-libraries.
    List<DependentModule> nonLibraries = Lists.newArrayList();
    for (DependentModule dep : dependencies) {
      if (dep.isLibrary()) {
        dependenciesInOrder.add(dep);
      } else {
        nonLibraries.add(dep);
      }
    }
    dependenciesInOrder.addAll(nonLibraries);

    // Add the JDK last.
    dependenciesInOrder.add(jdkDependency);
    return dependenciesInOrder;
  }
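A worked example of the resulting order (hypothetical module names):

  // Given dependencies = {libA, moduleB, libC} in that iteration order, where libA and libC are
  // libraries and includeSourceFolder is true, the method returns
  //   [source folder, libA, libC, moduleB, jdkDependency]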
 @Test
 public void testDivideList() {
   List<Integer> input = Lists.newArrayList();
   input.add(3);
   input.add(1);
   input.add(4);
   input.add(5);
   input.add(9);
   input.add(2);
   input.add(6);
   input.add(8);
   input.add(7);
   List<Integer> expected = Lists.newArrayList();
   expected.add(2);
   expected.add(1);
   expected.add(3);
   expected.add(5);
   expected.add(9);
   expected.add(4);
   expected.add(6);
   expected.add(8);
   expected.add(7);
   List<Integer> actual = ListManipulator.divideList(input, 0);
   assertNotNull("The returned list should not be null.", actual);
   assertEquals("The size of the returned list is incorrect.", expected.size(), actual.size());
   for (int i = 0; i < expected.size(); i++) {
     assertEquals("Incorrect value at index = " + i, expected.get(i), actual.get(i));
   }
 }
 private List<Cluster> doPrivilegedLookup(String partitionName, String vmTypeName)
     throws NotEnoughResourcesException {
   if (Partition.DEFAULT_NAME.equals(partitionName)) {
     Iterable<Cluster> authorizedClusters =
         Iterables.filter(
             Clusters.getInstance().listValues(),
             RestrictedTypes.filterPrivilegedWithoutOwner());
     Multimap<VmTypeAvailability, Cluster> sorted = TreeMultimap.create();
     for (Cluster c : authorizedClusters) {
       sorted.put(c.getNodeState().getAvailability(vmTypeName), c);
     }
     if (sorted.isEmpty()) {
       throw new NotEnoughResourcesException(
           "Not enough resources: no availability zone is available in which you have permissions to run instances.");
     } else {
       return Lists.newArrayList(sorted.values());
     }
   } else {
     ServiceConfiguration ccConfig =
         Topology.lookup(ClusterController.class, Partitions.lookupByName(partitionName));
     Cluster cluster = Clusters.lookup(ccConfig);
     if (cluster == null) {
       throw new NotEnoughResourcesException("Can't find cluster " + partitionName);
     }
     if (!RestrictedTypes.filterPrivilegedWithoutOwner().apply(cluster)) {
       throw new NotEnoughResourcesException("Not authorized to use cluster " + partitionName);
     }
     return Lists.newArrayList(cluster);
   }
 }
  @Override
  public void generate(TypeDeclaration node) {
    syncLineNumbers(node.getName()); // avoid doc-comment

    String typeName = NameTable.getFullName(node);
    List<FieldDeclaration> fields = Lists.newArrayList(node.getFields());
    List<MethodDeclaration> methods = Lists.newArrayList(node.getMethods());
    fieldHiders = HiddenFieldDetector.getFieldNameConflicts(node);
    if (node.isInterface()) {
      printStaticInterface(node, typeName, fields, methods);
    } else {
      printf("@implementation %s\n\n", typeName);
      printStaticReferencesMethod(fields);
      printStaticVars(fields, /* isInterface */ false);
      printStaticFieldAccessors(fields, methods, /* isInterface */ false);
      printMethods(node);
      if (!Options.stripReflection()) {
        printTypeAnnotationsMethod(node);
        printMethodAnnotationMethods(Lists.newArrayList(node.getMethods()));
        printFieldAnnotationMethods(Lists.newArrayList(node.getFields()));
        printMetadata(node);
      }

      println("@end");
    }
  }
  @Test
  public void testGranularitySpecPostConstructorIntervals() {
    // Deprecated and replaced by granularitySpec, but still supported
    final HadoopDruidIndexerConfig cfg;

    try {
      cfg =
          jsonMapper.readValue(
              "{" + "\"segmentGranularity\":\"day\"" + "}", HadoopDruidIndexerConfig.class);
    } catch (Exception e) {
      throw Throwables.propagate(e);
    }

    cfg.setIntervals(Lists.newArrayList(new Interval("2012-03-01/P1D")));

    final UniformGranularitySpec granularitySpec =
        (UniformGranularitySpec) cfg.getGranularitySpec();

    Assert.assertEquals(
        "getIntervals",
        Lists.newArrayList(new Interval("2012-03-01/P1D")),
        granularitySpec.getIntervals());

    Assert.assertEquals("getGranularity", "DAY", granularitySpec.getGranularity().toString());
  }
Example #15
  public static void trainModel(String filteredDataPath, String modelpath) throws IOException {

    String line = "";
    String combline = "";
    // read and process raw data
    BufferedReader br = new BufferedReader(new FileReader(filteredDataPath));

    while ((line = br.readLine()) != null) {
      combline = combline + " " + line;
    }
    br.close();

    List<String> words = Lists.newArrayList(combline.split(" "));
    List<List<String>> localDoc = Lists.newArrayList(words, words);

    // build a context object
    JavaSparkContext sc = new JavaSparkContext("local", "Word2VecSuite");
    JavaRDD<List<String>> doc = sc.parallelize(localDoc);

    // training settings
    Word2Vec word2vec = new Word2Vec().setVectorSize(100).setMinCount(50).setSeed(42L);

    // train
    Word2VecModel model = word2vec.fit(doc);

    // save model
    SparkContext sc1 = JavaSparkContext.toSparkContext(sc);
    model.save(sc1, modelpath);
    System.out.println("Model has been saved in folder: " + modelpath);
  }
Example #16
 /** Returns all known role names. */
 public List<String> listRoles() throws IOException {
   ILockedRepository repo = null;
   try {
     repo = globalRepositoryManager.getProjectCentralRepository(REPOSITORY_NAME, false);
     File workingDir = RepositoryUtil.getWorkingDir(repo.r());
     FileFilter filter =
         new FileFilter() {
           @Override
           public boolean accept(File file) {
             return file.isFile() && file.getName().endsWith(ROLE_SUFFIX);
           }
         };
     List<File> files = Lists.newArrayList(workingDir.listFiles(filter));
     Function<File, String> function =
         new Function<File, String>() {
           @Override
           public String apply(File file) {
             return StringUtils.substringBeforeLast(file.getName(), ROLE_SUFFIX);
           }
         };
     List<String> users = Lists.newArrayList(Lists.transform(files, function));
     Collections.sort(users);
     return users;
   } finally {
     Util.closeQuietly(repo);
   }
 }
  @Test
  public void testGetAllWithBinaryData() throws Exception {
    Store<ByteArray, byte[], byte[]> store = getStore();
    List<ByteArray> keys = Lists.newArrayList();
    List<byte[]> values = Lists.newArrayList();

    // The byte 0x8c is interesting because the getContent method of MimeBodyPart
    // converts it to the two bytes 0xc2 0x8c.
    // This Stack Overflow question tracks the issue:
    // http://stackoverflow.com/questions/23023583/mimebodypart-getcontent-corrupts-binary-data

    byte[] interestingByte = new byte[] {(byte) 0x8c};
    ByteArray interestingKey = new ByteArray(interestingByte);

    keys.add(interestingKey);
    values.add(interestingByte);

    // Add all possible byte values
    byte[] allPossibleBytes = getAllPossibleBytes();
    ByteArray allPossibleKey = new ByteArray(allPossibleBytes);
    keys.add(allPossibleKey);
    values.add(allPossibleBytes);

    assertEquals(keys.size(), values.size());
    int count = keys.size();

    for (int i = 0; i < count; i++) {
      VectorClock vc = getClock(0, 0);
      store.put(keys.get(i), new Versioned<byte[]>(values.get(i), vc), null);
    }

    Map<ByteArray, List<Versioned<byte[]>>> result = store.getAll(keys, null);
    assertGetAllValues(keys, values, result);
  }
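As a footnote to the 0x8c comment in the test above, a tiny standalone sketch (not part of the original test) of the re-encoding it refers to:

  // The single byte 0x8C, read back as the character U+008C and written out again as UTF-8,
  // becomes the two bytes 0xC2 0x8C -- the corruption discussed in the linked question.
  byte[] reencoded = String.valueOf((char) 0x8c).getBytes(java.nio.charset.StandardCharsets.UTF_8);
  // reencoded is { (byte) 0xC2, (byte) 0x8C }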
Example #18
  @Before
  @SuppressWarnings("unchecked")
  public void setUp() throws IOException {
    String inputFile1 = tmpDir.copyResourceFileName("set1.txt");
    String inputFile2 = tmpDir.copyResourceFileName("set2.txt");
    if (pipelineClass == null) {
      pipeline = MemPipeline.getInstance();
    } else {
      pipeline = new MRPipeline(pipelineClass, tmpDir.getDefaultConfiguration());
    }
    PCollection<String> firstCollection =
        pipeline.read(At.textFile(inputFile1, typeFamily.strings()));
    PCollection<String> secondCollection =
        pipeline.read(At.textFile(inputFile2, typeFamily.strings()));

    LOG.info(
        "Test fixture: ["
            + pipeline.getClass().getSimpleName()
            + " : "
            + typeFamily.getClass().getSimpleName()
            + "]  First: "
            + Lists.newArrayList(firstCollection.materialize().iterator())
            + ", Second: "
            + Lists.newArrayList(secondCollection.materialize().iterator()));

    union = secondCollection.union(firstCollection);
  }
Example #19
  public List getStichSlots() {
    ArrayList arraylist = Lists.newArrayList();
    Iterator iterator = this.stitchSlots.iterator();

    while (iterator.hasNext()) {
      Stitcher.Slot slot = (Stitcher.Slot) iterator.next();
      slot.getAllStitchSlots(arraylist);
    }

    ArrayList arraylist1 = Lists.newArrayList();
    Iterator iterator1 = arraylist.iterator();

    while (iterator1.hasNext()) {
      Stitcher.Slot slot1 = (Stitcher.Slot) iterator1.next();
      Stitcher.Holder holder = slot1.getStitchHolder();
      TextureAtlasSprite textureatlassprite = holder.getAtlasSprite();
      textureatlassprite.initSprite(
          this.currentWidth,
          this.currentHeight,
          slot1.getOriginX(),
          slot1.getOriginY(),
          holder.isRotated());
      arraylist1.add(textureatlassprite);
    }

    return arraylist1;
  }
  private void removeTags(final String imageId) throws Exception {
    final ImageInfo image = Images.lookupImage(imageId);
    final String imageOwnerId = image.getOwnerUserId();

    DeleteTagsTask task =
        new DeleteTagsTask(
            imageOwnerId,
            Lists.newArrayList(image.getDisplayName()),
            Lists.newArrayList(TAG_KEY_STATE, TAG_KEY_MESSAGE));
    CheckedListenableFuture<Boolean> result = task.dispatch();
    result.get(); // wait for the delete-tags request to complete; the boolean result is unused
    final List<VmInstance> instances = this.lookupInstances(imageId);
    for (final VmInstance instance : instances) {
      final String instanceId = instance.getInstanceId();
      final String instanceOwnerId = instance.getOwnerUserId();
      try {
        task =
            new DeleteTagsTask(
                instanceOwnerId,
                Lists.newArrayList(instanceId),
                Lists.newArrayList(TAG_KEY_STATE, TAG_KEY_MESSAGE));
        result = task.dispatch();
        result.get(); // wait for completion; the result is unused
      } catch (final Exception ex) {
        // ignore failures when removing tags from an individual instance
      }
    }
  }
  @Override
  public DataResponse<Iterable<TestResult>> search(TestResultRequest request) {
    Component component = componentRepository.findOne(request.getComponentId());
    if (!component.getCollectorItems().containsKey(CollectorType.Test)) {
      return new DataResponse<>(null, 0L);
    }
    List<TestResult> result = new ArrayList<>();

    for (CollectorItem item : component.getCollectorItems().get(CollectorType.Test)) {

      QTestResult testResult = new QTestResult("testResult");
      BooleanBuilder builder = new BooleanBuilder();

      builder.and(testResult.collectorItemId.eq(item.getId()));

      if (request.validStartDateRange()) {
        builder.and(
            testResult.startTime.between(request.getStartDateBegins(), request.getStartDateEnds()));
      }
      if (request.validEndDateRange()) {
        builder.and(
            testResult.endTime.between(request.getEndDateBegins(), request.getEndDateEnds()));
      }

      if (request.validDurationRange()) {
        builder.and(
            testResult.duration.between(
                request.getDurationGreaterThan(), request.getDurationLessThan()));
      }

      if (!request.getTypes().isEmpty()) {
        builder.and(testResult.testCapabilities.any().type.in(request.getTypes()));
      }

      if (request.getMax() == null) {
        result.addAll(
            Lists.newArrayList(
                testResultRepository.findAll(builder.getValue(), testResult.timestamp.desc())));
      } else {
        PageRequest pageRequest =
            new PageRequest(0, request.getMax(), Sort.Direction.DESC, "timestamp");
        result.addAll(
            Lists.newArrayList(
                testResultRepository.findAll(builder.getValue(), pageRequest).getContent()));
      }
    }
    // One collector per Type. get(0) is hardcoded.
    if (!CollectionUtils.isEmpty(component.getCollectorItems().get(CollectorType.Test))
        && (component.getCollectorItems().get(CollectorType.Test).get(0) != null)) {
      Collector collector =
          collectorRepository.findOne(
              component.getCollectorItems().get(CollectorType.Test).get(0).getCollectorId());
      if (collector != null) {
        return new DataResponse<>(
            pruneToDepth(result, request.getDepth()), collector.getLastExecuted());
      }
    }

    return new DataResponse<>(null, 0L);
  }
Example #22
  @Test
  public void testBuildJson() {
    Employee emp =
        new Employee("Amit", "Kapoor")
            .setPhones(
                Lists.newArrayList(new Phone("+1", 1234567890L), new Phone("+91", 9809901234L)));
    Employee emp2 =
        new Employee("Tim", "Johnes")
            .setPhones(
                Lists.newArrayList(new Phone("+1", 1234567890L), new Phone("+91", 9809901234L)));

    String empJson =
        new JSONSerializer()
            .exclude("*.class", "fullName")
            .serialize(Lists.newArrayList(emp, emp2));
    System.out.println("JSON: \n" + empJson);

    empJson =
        new JSONSerializer()
            .include("firstName", "lastName")
            .exclude("*")
            .serialize(Lists.newArrayList(emp, emp2));
    System.out.println("JSON: \n" + empJson);
    /*
            Employee employee = new JSONDeserializer<Employee>().use(null, Employee.class).deserialize(empJson);
            System.out.println("Deserialized EmpJson: \n" + employee);
    */

  }
  private void checkPossiblesTimeboxes(
      final Participant participant, List<TimeBox> possibleTbs, Priority priority) {

    if (possibleTbs.size() == 1) {

      boolean allocated = allocateTimeBox(possibleTbs.get(0), participant);
      if (allocated) return;
    }

    if (!possibleTbs.isEmpty()) {

      buffer.put(participant, Lists.newArrayList(possibleTbs));
      System.out.println("insert " + possibleTbs.size() + " timeboxes in the buffer");

      final Priority nextPriority = getNextPriority(priority);

      if (nextPriority != null) {

        System.out.println("set priority level to : " + nextPriority.getRole());

        // filter the unavailabilities to get only the ones matching the current priority level
        Collection<Unavailability> unavailabilities =
            Collections2.filter(
                this.unavailabilities,
                new Predicate<Unavailability>() {
                  public boolean apply(Unavailability a) {
                    Person p = a.getPerson();
                    return (p.equals(participant.getStudent())
                            || p.equals(participant.getFollowingTeacher()))
                        && (p.getRole() == nextPriority.getRole());
                  }
                });

        System.out.println("unavailabilities found: " + unavailabilities.size());
        System.out.println("{");
        for (Unavailability ua : unavailabilities) {
          System.out.println(ua.getPeriod().getFrom() + " - " + ua.getPeriod().getTo());
        }
        System.out.println("}");

        if (!unavailabilities.isEmpty()) {
          for (TimeBox timeBox : Lists.newArrayList(possibleTbs)) {

            System.out.println(
                "check unavailability "
                    + (new DateTime(timeBox.getFrom()).toString("dd/MM/yyyy HH:mm")));

            // Check if there is no unavailabilities for that timebox
            if (!AlgoPlanningUtils.isAvailable(unavailabilities, timeBox)) {
              System.out.println("removing one timebox...");
              possibleTbs.remove(timeBox);
            }
          }
        }

        // let's do it again
        checkPossiblesTimeboxes(participant, possibleTbs, nextPriority);
      }
    }
  }
Example #24
  public SonarComponents(
      FileLinesContextFactory fileLinesContextFactory,
      ResourcePerspectives resourcePerspectives,
      FileSystem fs,
      JavaClasspath javaClasspath,
      JavaTestClasspath javaTestClasspath,
      CheckFactory checkFactory,
      @Nullable CheckRegistrar[] checkRegistrars) {
    this.fileLinesContextFactory = fileLinesContextFactory;
    this.resourcePerspectives = resourcePerspectives;
    this.fs = fs;
    this.javaClasspath = javaClasspath;
    this.javaTestClasspath = javaTestClasspath;
    this.checkFactory = checkFactory;
    checks = Lists.newArrayList();

    if (checkRegistrars != null) {
      CheckRegistrar.RegistrarContext registrarContext = new CheckRegistrar.RegistrarContext();
      for (CheckRegistrar checkClassesRegister : checkRegistrars) {
        checkClassesRegister.register(registrarContext);
        registerCheckClasses(
            registrarContext.repositoryKey(), Lists.newArrayList(registrarContext.checkClasses()));
      }
    }
  }
Example #25
  public void testActions() {
    ActionDef def = Mockito.mock(ActionDef.class);
    Action test = new MyAction(null, def, null);

    List<Action> actions = test.getActions();
    assertNotNull("Actions should never be null", actions);
    assertEquals("Actions should empty", 0, actions.size());

    List<Action> newActions = Lists.newArrayList(getActionWithId("a"), getActionWithId("b"));
    test.add(newActions);
    actions = test.getActions();
    assertNotNull("Actions should never be null", actions);
    assertEquals("Actions should be length 2", 2, actions.size());
    assertEquals("Action 'a' should be first", "a", actions.get(0).getId());
    assertEquals("Action 'b' should be first", "b", actions.get(1).getId());

    newActions = Lists.newArrayList(getActionWithId("c"), getActionWithId("d"));
    test.add(newActions);
    actions = test.getActions();
    assertNotNull("Actions should never be null", actions);
    assertEquals("Actions should be length 4", 4, actions.size());
    assertEquals("Action 'a' should be first", "a", actions.get(0).getId());
    assertEquals("Action 'b' should be first", "b", actions.get(1).getId());
    assertEquals("Action 'c' should be first", "c", actions.get(2).getId());
    assertEquals("Action 'd' should be first", "d", actions.get(3).getId());
  }
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception {
    List<QueryRunner<Result<TimeseriesResultValue>>> singleSegmentRunners = Lists.newArrayList();
    QueryToolChest toolChest = factory.getToolchest();
    for (int i = 0; i < numSegments; i++) {
      String segmentName = "qIndex" + i;
      QueryRunner<Result<TimeseriesResultValue>> runner =
          QueryBenchmarkUtil.makeQueryRunner(
              factory, segmentName, new QueryableIndexSegment(segmentName, qIndexes.get(i)));
      singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }

    QueryRunner theRunner =
        toolChest.postMergeQueryDecoration(
            new FinalizeResultsQueryRunner<>(
                toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)),
                toolChest));

    Sequence<Result<TimeseriesResultValue>> queryResult =
        theRunner.run(query, Maps.<String, Object>newHashMap());
    List<Result<TimeseriesResultValue>> results =
        Sequences.toList(queryResult, Lists.<Result<TimeseriesResultValue>>newArrayList());

    for (Result<TimeseriesResultValue> result : results) {
      blackhole.consume(result);
    }
  }
  class MockOutputMutator implements OutputMutator {
    List<MaterializedField> removedFields = Lists.newArrayList();
    List<ValueVector> addFields = Lists.newArrayList();

    List<MaterializedField> getRemovedFields() {
      return removedFields;
    }

    List<ValueVector> getAddFields() {
      return addFields;
    }

    @Override
    public void addFields(List<ValueVector> vv) {
      return;
    }

    @Override
    public <T extends ValueVector> T addField(MaterializedField field, Class<T> clazz)
        throws SchemaChangeException {
      return null;
    }

    @Override
    public void allocate(int recordCount) {}

    @Override
    public boolean isNewSchema() {
      return false;
    }
  }
  @Test
  public void testShowEditorsInReadOnlyMode() {
    // Given:
    userWorkspaceContext.setProjectActive(false);
    userWorkspaceContext.setHasEditTranslationAccess(false);
    selectedTU = currentPageRows.get(0);
    ArrayList<ToggleEditor> currentEditors = Lists.newArrayList(editor);
    when(editor.getId()).thenReturn(selectedTU.getId());
    ArrayList<ToggleEditor> previousEditors = Lists.newArrayList(editor2);
    presenter.setStatesForTesting(null, 0, display);
    when(display.getId()).thenReturn(selectedTU.getId());
    when(display.getEditors()).thenReturn(previousEditors, currentEditors);
    when(sourceContentPresenter.getSourceContent(selectedTU.getId()))
        .thenReturn(Optional.of("source"));

    // When:
    presenter.setSelected(selectedTU.getId());

    // Then:
    verify(editorTranslators).clearTranslatorList(previousEditors);
    verify(editor).clearTranslatorList();
    verify(display).showButtons(false);
    verify(display).setToMode(ToggleEditor.ViewMode.VIEW);
    verify(editorKeyShortcuts).enableNavigationContext();
  }
 @Test
 public void substrLtrim() throws Exception {
   List<Expression> ltrimArgs = Lists.newArrayList(getInvertedLiteral("   blah", PDataType.CHAR));
   Expression ltrim = new LTrimFunction(ltrimArgs);
   List<Expression> substrArgs = Lists.newArrayList(ltrim, getLiteral(3), getLiteral(2));
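   // LTRIM drops the leading spaces ("   blah" -> "blah"); SUBSTR is 1-based, so (3, 2) yields "ah".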
   evaluateAndAssertResult(new SubstrFunction(substrArgs), "ah");
 }
Example #30
  @Test
  public void testGetTableCfsStr() {
    // opposite of TestPerTableCFReplication#testParseTableCFsFromConfig()

    Map<TableName, List<String>> tabCFsMap = null;

    // 1. null or empty string, result should be null
    assertEquals(null, ReplicationAdmin.getTableCfsStr(tabCFsMap));

    // 2. single table: "tab1" / "tab2:cf1" / "tab3:cf1,cf3"
    tabCFsMap = new TreeMap<TableName, List<String>>();
    tabCFsMap.put(TableName.valueOf("tab1"), null); // its table name is "tab1"
    assertEquals("tab1", ReplicationAdmin.getTableCfsStr(tabCFsMap));

    tabCFsMap = new TreeMap<TableName, List<String>>();
    tabCFsMap.put(TableName.valueOf("tab1"), Lists.newArrayList("cf1"));
    assertEquals("tab1:cf1", ReplicationAdmin.getTableCfsStr(tabCFsMap));

    tabCFsMap = new TreeMap<TableName, List<String>>();
    tabCFsMap.put(TableName.valueOf("tab1"), Lists.newArrayList("cf1", "cf3"));
    assertEquals("tab1:cf1,cf3", ReplicationAdmin.getTableCfsStr(tabCFsMap));

    // 3. multiple tables: "tab1 ; tab2:cf1 ; tab3:cf1,cf3"
    tabCFsMap = new TreeMap<TableName, List<String>>();
    tabCFsMap.put(TableName.valueOf("tab1"), null);
    tabCFsMap.put(TableName.valueOf("tab2"), Lists.newArrayList("cf1"));
    tabCFsMap.put(TableName.valueOf("tab3"), Lists.newArrayList("cf1", "cf3"));
    assertEquals("tab1;tab2:cf1;tab3:cf1,cf3", ReplicationAdmin.getTableCfsStr(tabCFsMap));
  }