Example #1
  /**
   * reads all methods selected by the action annotations, for building agent actions
   *
   * @param p_class class
   * @param p_root root class
   * @return stream of all methods with inheritance
   */
  private static Stream<Method> methods(final Class<?> p_class, final Class<?> p_root) {
    final Pair<Boolean, IAgentAction.EAccess> l_classannotation = CCommon.isActionClass(p_class);
    if (!l_classannotation.getLeft())
      return p_class.getSuperclass() == null
          ? Stream.of()
          : methods(p_class.getSuperclass(), p_root);

    final Predicate<Method> l_filter =
        IAgentAction.EAccess.WHITELIST.equals(l_classannotation.getRight())
            ? i -> !CCommon.isActionFiltered(i, p_root)
            : i -> CCommon.isActionFiltered(i, p_root);

    return Stream.concat(
        Arrays.stream(p_class.getDeclaredMethods())
            .parallel()
            .map(
                i -> {
                  i.setAccessible(true);
                  return i;
                })
            .filter(i -> !Modifier.isAbstract(i.getModifiers()))
            .filter(i -> !Modifier.isInterface(i.getModifiers()))
            .filter(i -> !Modifier.isNative(i.getModifiers()))
            .filter(i -> !Modifier.isStatic(i.getModifiers()))
            .filter(l_filter),
        methods(p_class.getSuperclass(), p_root));
  }
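The helpers above (CCommon, IAgentAction) are project-specific. A minimal, self-contained sketch of the same hierarchy-walking pattern, with no filtering and assuming imports of java.lang.reflect.Method, java.util.Arrays and java.util.stream.Stream:

  private static Stream<Method> declaredMethods(final Class<?> p_class) {
    // recursion stops once the hierarchy is exhausted (Object's superclass is null)
    return p_class == null
        ? Stream.empty()
        : Stream.concat(
            Arrays.stream(p_class.getDeclaredMethods()),
            declaredMethods(p_class.getSuperclass()));
  }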
Example #2
  @Override
  public <T> Collection<Resource> getAllocatedResources(Resource parent, Class<T> cls) {
    checkNotNull(parent);
    checkNotNull(cls);
    checkArgument(parent instanceof DiscreteResource);

    Versioned<Set<Resource>> children = childMap.get((DiscreteResource) parent);
    if (children == null) {
      return ImmutableList.of();
    }

    Stream<DiscreteResource> discrete =
        children
            .value()
            .stream()
            .filter(x -> x.last().getClass().equals(cls))
            .filter(x -> x instanceof DiscreteResource)
            .map(x -> (DiscreteResource) x)
            .filter(discreteConsumers::containsKey);

    Stream<ContinuousResource> continuous =
        children
            .value()
            .stream()
            .filter(x -> x.id().equals(parent.id().child(cls)))
            .filter(x -> x instanceof ContinuousResource)
            .map(x -> (ContinuousResource) x)
            .filter(x -> continuousConsumers.containsKey(x.id()))
            .filter(x -> continuousConsumers.get(x.id()) != null)
            .filter(x -> !continuousConsumers.get(x.id()).value().allocations().isEmpty());

    return Stream.concat(discrete, continuous).collect(Collectors.toList());
  }
Example #3
  // Adds any missing device ports.
  private void addMissingPorts(Device device) {
    try {
      List<Port> ports = deviceService.getPorts(device.id());
      Set<ConnectPoint> existing =
          ports
              .stream()
              .map(p -> new ConnectPoint(device.id(), p.number()))
              .collect(Collectors.toSet());
      Set<ConnectPoint> missing =
          connectPoints
              .stream()
              .filter(cp -> cp.deviceId().equals(device.id()))
              .filter(cp -> !existing.contains(cp))
              .collect(Collectors.toSet());

      if (!missing.isEmpty()) {
        List<PortDescription> newPorts =
            Stream.concat(
                    ports.stream().map(this::description), missing.stream().map(this::description))
                .collect(Collectors.toList());
        deviceProviderService.updatePorts(device.id(), newPorts);
      }
    } catch (IllegalArgumentException e) {
      log.warn("Error pushing ports: {}", e.getMessage());
    }
  }
Example #4
  @Override
  public Collection<Resource> getResources(ResourceConsumer consumer) {
    checkNotNull(consumer);

    // NOTE: getting all entries may become a performance bottleneck
    // TODO: revisit for better backend data structure
    Stream<DiscreteResource> discreteStream =
        discreteConsumers
            .entrySet()
            .stream()
            .filter(x -> x.getValue().value().equals(consumer))
            .map(Map.Entry::getKey);

    Stream<ContinuousResource> continuousStream =
        continuousConsumers
            .values()
            .stream()
            .flatMap(
                x ->
                    x.value()
                        .allocations()
                        .stream()
                        .map(y -> Maps.immutableEntry(x.value().original(), y)))
            .filter(x -> x.getValue().consumer().equals(consumer))
            .map(x -> x.getKey());

    return Stream.concat(discreteStream, continuousStream).collect(Collectors.toList());
  }
Example #5
  public static Assignment addHint(Assignment assignment, Hint hint) {

    assignment.setHints(
        Stream.concat(assignment.getHints().stream(), Stream.of(hint)).collect(toList()));

    return assignment;
  }
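The pattern above is the idiomatic way to append a single element without mutating the source collection. A minimal plain-JDK sketch (names illustrative):

  List<String> hints = Arrays.asList("first hint", "second hint");
  // a new list with one element appended; hints itself is left unchanged
  List<String> appended =
      Stream.concat(hints.stream(), Stream.of("third hint")).collect(Collectors.toList());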
Example #6
 @NotNull
 protected Stream<VirtualFile> getSelectedFiles() {
   return Stream.concat(
           getAfterRevisionsFiles(getSelectedChanges().stream()),
           getVirtualFiles(myViewer.getSelectionPaths(), null))
       .distinct();
 }
Example #7
  @SuppressWarnings({"unchecked", "rawtypes"})
  @Override
  public Stream<?> doApply(List<Stream<?>> streamsList) {

    AtomicInteger ctr = new AtomicInteger(2);

    Stream<?> unionizedStream =
        streamsList
            .stream()
            .reduce(
                (lStream, rStream) -> {
                  Stream<?> newStream = Stream.concat(lStream, rStream);
                  int currentStreamIdx = ctr.getAndIncrement();
                  for (int j = 0; j < checkPointProcedures.size(); j++) {
                    Tuple2<Integer, Object> postProc = checkPointProcedures.get(j);
                    if ((Integer) postProc._1() == currentStreamIdx) {
                      SerFunction f = (SerFunction) postProc._2();
                      if (f != null) {
                        newStream = (Stream) f.apply(newStream);
                      }
                    }
                  }
                  return newStream;
                })
            .get();

    if (this.distinct) {
      unionizedStream = unionizedStream.distinct();
    }

    return unionizedStream;
  }
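Reducing a list of streams with Stream.concat, as above, is a standard way to union an arbitrary number of streams. One caveat from the Stream.concat javadoc: repeated concatenation builds deeply nested pipelines and may cause deep call chains or even StackOverflowError, so flatMap is usually preferred for many inputs. A sketch of both, given a List<Stream<String>> streams (use one or the other, since each element stream can be consumed only once):

  // reduce + concat: fine for a handful of streams
  Stream<String> union = streams.stream().reduce(Stream.empty(), Stream::concat);
  // flatMap: the safer choice when the list is large
  Stream<String> flat = streams.stream().flatMap(s -> s);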
Example #8
  public static void main(String[] args) {

    List<Map<String, Object>> list1 = new ArrayList<>();
    list1.add(map("Month", "August-13", "Sales", 282200));
    list1.add(map("Month", "July-13", "Sales", 310400));

    System.out.println("list1 = " + list1);

    List<Map<String, Object>> list2 = new ArrayList<>();
    list2.add(map("Month", "August-13", "NoOfTranx", 6700));
    list2.add(map("Month", "July-13", "NoOfTranx", 14400));

    System.out.println("list2 = " + list2);

    Map<String, Map<String, Object>> result =
        Stream.concat(list1.stream(), list2.stream())
            .collect(
                toMap(
                    m -> (String) m.get("Month"),
                    m -> m,
                    (m1, m2) -> {
                      m1.putAll(m2);
                      return m1;
                    }));

    List<Map<String, Object>> merge = new ArrayList<>(result.values());
    System.out.println("merge = " + merge);
  }
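The map(...) helper is not part of the snippet; a plausible sketch (assumed, not from the source) builds a map from alternating key/value pairs. It must return a mutable map, because the merge function above calls m1.putAll(m2):

  private static Map<String, Object> map(Object... keysAndValues) {
    final Map<String, Object> m = new LinkedHashMap<>();
    for (int i = 0; i < keysAndValues.length; i += 2) {
      m.put((String) keysAndValues[i], keysAndValues[i + 1]);
    }
    return m;
  }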
Example #9
 protected OptionalEntity<FileConfig> getFileConfig(final CreateForm form) {
   final String username = systemHelper.getUsername();
   final long currentTime = systemHelper.getCurrentTimeAsLong();
   return getEntity(form, username, currentTime)
       .map(
           entity -> {
             entity.setUpdatedBy(username);
             entity.setUpdatedTime(currentTime);
             copyBeanToBean(
                 form,
                 entity,
                 op ->
                     op.exclude(
                         Stream.concat(
                                 Stream.of(Constants.COMMON_CONVERSION_RULE),
                                 Stream.of(Constants.PERMISSIONS))
                             .toArray(n -> new String[n])));
             final PermissionHelper permissionHelper = ComponentUtil.getPermissionHelper();
             entity.setPermissions(
                 split(form.permissions, "\n")
                     .get(
                         stream ->
                             stream
                                 .map(s -> permissionHelper.encode(s))
                                 .filter(StringUtil::isNotBlank)
                                 .distinct()
                                 .toArray(n -> new String[n])));
             return entity;
           });
 }
Example #10
  @Override
  public void loadData(String fileName, String sheetName) throws RestLoaderException, IOException {

    // Initialize
    init();

    // Read
    log.info("Reading data from the file..");
    List<FileData<PatientVisitDTO>> patientVisits = excelFileReader.readData(fileName, sheetName);
    log.info("Data read complete..");

    // Transform
    log.info("Transforming the data read..");
    patientVisits = patientVisitDataTransformer.populateModelList(patientVisits);
    log.info("Data transformation complete..");

    // Load
    log.info("Loading the data into PIM..");
    List<FileData<PatientVisitDTO>> post =
        patientVisits
            .stream()
            .filter(
                data ->
                    DataLoaderConstants.POST_OPERATION.equalsIgnoreCase(data.getOperationType()))
            .collect(Collectors.toCollection(ArrayList::new));

    List<FileData<PatientVisitDTO>> put =
        patientVisits
            .stream()
            .filter(
                data -> DataLoaderConstants.PUT_OPERATION.equalsIgnoreCase(data.getOperationType()))
            .collect(Collectors.toCollection(ArrayList::new));

    Map<String, List<FileData<PatientVisitDTO>>> groupedPostData = convertToMap(post);
    post = doPost(groupedPostData);

    List<FileData<PatientVisitDTO>> putList = new ArrayList<>();
    put.forEach(
        dataSheet -> {
          try {
            addPathParam(
                env.getProperty(DataLoaderConstants.PATIENT_ID_KEY),
                dataSheet.getEntity().getPatientNumber());
            addPathParam(
                env.getProperty(DataLoaderConstants.PATIENT_VISIT_ID_KEY),
                dataSheet.getEntity().getId());
            putList.add(doPut(dataSheet));
          } catch (RestLoaderException e) {
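            // Swallowed deliberately (assumed): a record whose PUT fails is skipped
            // and therefore omitted from putList.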
          }
        });

    patientVisits = Stream.concat(post.stream(), putList.stream()).collect(Collectors.toList());
    log.info("Data loading complete..");

    // Write
    log.info("Writing the results back to the file..");
    excelFileWriter.writeResult(fileName, patientVisits, sheetName);
    log.info("Writing results complete..");
  }
Example #11
 @VisibleForTesting
 String[] getRemoveCommand(URI uri) {
   String[] argsArray =
       Stream.concat(Stream.of(command, "remove", "-uri=" + uri), options.stream())
           .toArray(String[]::new);
   LOGGER.debug("COMMAND: {}", Arrays.deepToString(argsArray));
   return argsArray;
 }
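For context: with, say, command = "hsmcp" and options = ["-debug"] (illustrative values), the helper returns ["hsmcp", "remove", "-uri=<uri>", "-debug"]; Stream.concat keeps the fixed arguments first and appends the configured options.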
Example #12
 @Override
 public final Stream<? extends ITerm> apply(final IAgent<?> p_agent) {
   return Stream.concat(
       p_agent.runningplans().values().stream(),
       p_agent
           .beliefbase()
           .stream(m_paths.isEmpty() ? null : m_paths.toArray(new IPath[m_paths.size()])));
 }
Example #13
 public static <T> LinkedList<T> create(T head, T... members) {
   return Try.catchAndThrow(
       () -> {
         // At least 1 parameter is ensured with this function signature!
         final List<T> args =
             Stream.concat(Stream.of(head), Arrays.stream(members)).collect(Collectors.toList());
         return TailCall.run(() -> build(new StackWindow<T>(args), null));
       });
 }
Example #14
 private synchronized List<TaskSource> getSources() {
   return Stream.concat(
           Stream.of(planFragment.getPartitionedSourceNode()),
           planFragment.getRemoteSourceNodes().stream())
       .filter(Objects::nonNull)
       .map(PlanNode::getId)
       .map(this::getSource)
       .filter(Objects::nonNull)
       .collect(toImmutableList());
 }
Example #15
  public static Set<Make> makeWithDpendencies(Make... makes) {

    EnumSet<Make> makeSet = EnumSet.noneOf(Make.class);

    makeSet.addAll(
        Arrays.asList(makes)
            .stream()
            .flatMap(m -> Stream.concat(dependencies.apply(m), Stream.of(m)))
            .collect(Collectors.toList()));

    return Collections.unmodifiableSet(makeSet);
  }
Example #16
  private EvolutionStart<G, C> evolutionStart(
      final Iterable<Genotype<G>> genotypes, final long generation) {
    final Stream<Phenotype<G, C>> stream =
        Stream.concat(
            StreamSupport.stream(genotypes.spliterator(), false)
                .map(gt -> Phenotype.of(gt, generation, _fitnessFunction, _fitnessScaler)),
            Stream.generate(() -> newPhenotype(generation)));

    final Population<G, C> population = stream.limit(getPopulationSize()).collect(toPopulation());

    return EvolutionStart.of(population, generation);
  }
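Note that Stream.generate(() -> newPhenotype(generation)) is infinite; Stream.concat is lazy, so the pipeline only becomes finite through the later limit(getPopulationSize()). The generator is consulted only for the slots the genotypes iterable does not fill.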
Example #17
  public Geobuf.Data.Geometry polyToGeobuf(Polygon poly) {
    Geobuf.Data.Geometry.Builder builder =
        Geobuf.Data.Geometry.newBuilder().setType(Geobuf.Data.Geometry.Type.POLYGON);

    Stream<LineString> interiorRings =
        IntStream.range(0, poly.getNumInteriorRing()).mapToObj(poly::getInteriorRingN);

    Stream.concat(Stream.of(poly.getExteriorRing()), interiorRings)
        .forEach(r -> addRing(r, builder));

    return builder.build();
  }
Example #18
    public WindowOperatorFactory(
        int operatorId,
        List<? extends Type> sourceTypes,
        List<Integer> outputChannels,
        List<WindowFunctionDefinition> windowFunctionDefinitions,
        List<Integer> partitionChannels,
        List<Integer> preGroupedChannels,
        List<Integer> sortChannels,
        List<SortOrder> sortOrder,
        int preSortedChannelPrefix,
        FrameInfo frameInfo,
        int expectedPositions) {
      requireNonNull(sourceTypes, "sourceTypes is null");
      requireNonNull(outputChannels, "outputChannels is null");
      requireNonNull(windowFunctionDefinitions, "windowFunctionDefinitions is null");
      requireNonNull(partitionChannels, "partitionChannels is null");
      requireNonNull(preGroupedChannels, "preGroupedChannels is null");
      checkArgument(
          partitionChannels.containsAll(preGroupedChannels),
          "preGroupedChannels must be a subset of partitionChannels");
      requireNonNull(sortChannels, "sortChannels is null");
      requireNonNull(sortOrder, "sortOrder is null");
      checkArgument(
          sortChannels.size() == sortOrder.size(),
          "Must have same number of sort channels as sort orders");
      checkArgument(
          preSortedChannelPrefix <= sortChannels.size(),
          "Cannot have more pre-sorted channels than specified sorted channels");
      checkArgument(
          preSortedChannelPrefix == 0
              || ImmutableSet.copyOf(preGroupedChannels)
                  .equals(ImmutableSet.copyOf(partitionChannels)),
          "preSortedChannelPrefix can only be greater than zero if all partition channels are pre-grouped");
      requireNonNull(frameInfo, "frameInfo is null");

      this.operatorId = operatorId;
      this.sourceTypes = ImmutableList.copyOf(sourceTypes);
      this.outputChannels = ImmutableList.copyOf(outputChannels);
      this.windowFunctionDefinitions = ImmutableList.copyOf(windowFunctionDefinitions);
      this.partitionChannels = ImmutableList.copyOf(partitionChannels);
      this.preGroupedChannels = ImmutableList.copyOf(preGroupedChannels);
      this.sortChannels = ImmutableList.copyOf(sortChannels);
      this.sortOrder = ImmutableList.copyOf(sortOrder);
      this.preSortedChannelPrefix = preSortedChannelPrefix;
      this.frameInfo = frameInfo;
      this.expectedPositions = expectedPositions;
      this.types =
          Stream.concat(
                  outputChannels.stream().map(sourceTypes::get),
                  windowFunctionDefinitions.stream().map(WindowFunctionDefinition::getType))
              .collect(toImmutableList());
    }
Example #19
  private EvolutionStart<G, C> evolutionStart(
      final Population<G, C> population, final long generation) {
    final Stream<Phenotype<G, C>> stream =
        Stream.concat(
            population
                .stream()
                .map(p -> p.newInstance(p.getGeneration(), _fitnessFunction, _fitnessScaler)),
            Stream.generate(() -> newPhenotype(generation)));

    final Population<G, C> pop = stream.limit(getPopulationSize()).collect(toPopulation());

    return EvolutionStart.of(pop, generation);
  }
Example #20
 public static void m1() {
   List<String> l =
       new ArrayList<String>() {
         {
           add("love");
           add("make");
           add("ok");
         }
       };
   Stream<Stream<Character>> result = l.stream().map(w -> characterStream(w));
   Stream<Character> result2 = l.stream().flatMap(w -> characterStream(w));
   Stream<Character> result3 = Stream.concat(characterStream("hello"), characterStream("world"));
 }
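characterStream is not defined in the snippet; a minimal sketch of what it presumably does (assumed, not from the source):

  static Stream<Character> characterStream(String s) {
    // box each char of the string into a Character
    return s.chars().mapToObj(c -> (char) c);
  }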
Example #21
  private Stream<ElmReference> getReferencesFromNonSinglePath(List<ElmUpperCaseId> children) {
    ElmUpperCaseId lastChild = children.get(children.size() - 1);
    children.remove(children.size() - 1);
    int moduleTextLength = this.getTextLength() - lastChild.getTextLength() - 1;
    if (moduleTextLength < 0) {
      moduleTextLength = 0;
    }

    ElmReference reference = new ElmAbsoluteTypeReference(lastChild, children);

    return Stream.concat(
        Stream.of(
            new ElmContainingModuleReference(
                this, new TextRange(0, moduleTextLength), children.size(), reference)),
        Stream.of(reference.referenceInAncestor(this)));
  }
Example #22
 @VisibleForTesting
 String[] getFlushCommand(URI dataFile, FileAttributes fileAttributes) {
   StorageInfo storageInfo = StorageInfos.extractFrom(fileAttributes);
   String[] argsArray =
       Stream.concat(
               Stream.of(
                   command,
                   "put",
                   fileAttributes.getPnfsId().toString(),
                   getFileString(dataFile),
                   "-si=" + storageInfo),
               options.stream())
           .toArray(String[]::new);
   LOGGER.debug("COMMAND: {}", Arrays.deepToString(argsArray));
   return argsArray;
 }
Example #23
  @Override
  public DatabaseDto get(QualifiedName name, boolean includeUserMetadata) {
    Session session = validateAndGetSession(name);
    MetacatCatalogConfig config = metacatConnectorManager.getCatalogConfig(name.getCatalogName());

    QualifiedTablePrefix spec =
        new QualifiedTablePrefix(name.getCatalogName(), name.getDatabaseName());
    List<QualifiedTableName> tableNames = metadataManager.listTables(session, spec);
    List<QualifiedTableName> viewNames = Collections.emptyList();
    if (config.isIncludeViewsWithTables()) {
      // TODO JdbcMetadata returns ImmutableList.of() for views.  We should change it to fetch
      // views.
      viewNames = metadataManager.listViews(session, spec);
    }

    // Check to see if schema exists
    if (tableNames.isEmpty() && viewNames.isEmpty()) {
      if (!exists(name)) {
        throw new SchemaNotFoundException(name.getDatabaseName());
      }
    }

    ConnectorSchemaMetadata schema = metadataManager.getSchema(session);

    DatabaseDto dto = new DatabaseDto();
    dto.setType(metacatConnectorManager.getCatalogConfig(name).getType());
    dto.setName(name);
    dto.setUri(schema.getUri());
    dto.setMetadata(schema.getMetadata());
    dto.setTables(
        Stream.concat(tableNames.stream(), viewNames.stream())
            .map(QualifiedTableName::getTableName)
            .sorted(String.CASE_INSENSITIVE_ORDER)
            .collect(Collectors.toList()));
    if (includeUserMetadata) {
      log.info("Populate user metadata for schema {}", name);
      userMetadataService.populateMetadata(dto);
    }

    return dto;
  }
Example #24
  public Geobuf.Data.Geometry multiPolyToGeobuf(MultiPolygon poly) {
    Geobuf.Data.Geometry.Builder builder =
        Geobuf.Data.Geometry.newBuilder().setType(Geobuf.Data.Geometry.Type.MULTIPOLYGON);

    // first we specify the number of polygons
    builder.addLengths(poly.getNumGeometries());

    for (int i = 0; i < poly.getNumGeometries(); i++) {
      Polygon p = (Polygon) poly.getGeometryN(i);
      // how many rings there are
      builder.addLengths(p.getNumInteriorRing() + 1);

      Stream<LineString> interiorRings =
          IntStream.range(0, p.getNumInteriorRing()).<LineString>mapToObj(p::getInteriorRingN);

      Stream.concat(Stream.of(p.getExteriorRing()), interiorRings)
          .forEach(r -> addRing(r, builder));
    }

    return builder.build();
  }
Example #25
  /**
   * Creates a method conveniently. The method is added to the class "TestClass". Parameters can
   * be given as a (positional) list of local variables (the "identity statements", required by
   * Soot to map parameters to local variables, are inserted automatically).
   */
  public SootMethod makeMethod(
      int modifier, String name, List<Local> params, soot.Type retType, List<Unit> bodyStmts) {
    SootMethod m =
        new SootMethod(
            name, params.stream().map(Local::getType).collect(toList()), retType, modifier);
    this.testClass.addMethod(m);
    Body body = Jimple.v().newBody(m);
    m.setActiveBody(body);

    // set the statements for the body.. first the identity statements, then the bodyStmts
    if (!m.isStatic()) {
      body.getLocals().add(localThis);
      body.getUnits()
          .add(Jimple.v().newIdentityStmt(localThis, Jimple.v().newThisRef(testClass.getType())));
    }
    IntStream.range(0, params.size())
        .forEach(
            pos -> {
              Local l = params.get(pos);
              ParameterRef pr = Jimple.v().newParameterRef(l.getType(), pos);
              body.getUnits().add(Jimple.v().newIdentityStmt(l, pr));
            });
    body.getUnits().addAll(bodyStmts);

    // set the locals for the body
    Set<Local> locals =
        Stream.concat(
                params.stream(),
                body.getUseAndDefBoxes()
                    .stream()
                    .filter(b -> b.getValue() instanceof Local)
                    .map(b -> (Local) b.getValue()))
            .collect(toSet());
    locals.removeAll(body.getLocals());
    body.getLocals().addAll(locals);

    return m;
  }
Example #26
  public static void main(String[] args) {
    List<String> list = new ArrayList<>();
    list.add("aa");
    list.add("cccc");
    list.add("bbb");

    /** Stream usage: create/obtain a stream -> intermediate operations (filter, map, etc.) -> terminal operation (aggregate, collect the result) */
    list.stream().forEach(System.out::println);
    System.out.println();

    /** Filtering; for the collect syntax see {@link StreamCollectTest} */
    List list0 = list.stream().filter(str -> str.startsWith("cc")).collect(Collectors.toList());
    List list1 = list.stream().filter(str -> str.startsWith("aa")).collect(Collectors.toList());

    list0.stream().forEach(System.out::println);
    list1.stream().forEach(System.out::println);
    System.out.println();

    /** Transformation */
    List list2 = list.stream().map(str -> str.replace("c", "*")).collect(Collectors.toList());

    list2.stream().forEach(System.out::println);
    System.out.println();

    /** Extraction: skip to the start offset, then take up to limit elements */
    List list3 = list.stream().skip(0).limit(1).collect(Collectors.toList());

    list3.stream().forEach(System.out::println);
    System.out.println();

    /** Concatenation (the list concatenated with itself) */
    List list4 = Stream.concat(list.stream(), list.stream()).collect(Collectors.toList());

    list4.stream().forEach(System.out::println);
    System.out.println();
  }
Example #27
  /**
   * Gets the dependencies. Kept separate from preInit because of ordering issues when mods need
   * to download other mods before preInit is called. The wrapper just needs to call this method
   * right before it downloads the dependencies.
   */
  public void generateDependencies() {
    neededDeps = new HashMap<>(); // This should be cleaned every time this method is run.

    Stream.concat(javaClasses.values().stream(), scalaClasses.values().stream())
        .forEach(this::generateAndAddDependencies);
  }
Example #28
  @Override
  public void accumulate(final VariantContext ctx) {
    logger.record(ctx.getContig(), ctx.getStart());

    final String variantChrom = ctx.getContig();
    final int variantPos = ctx.getStart();

    // Skip anything a little too funky
    if (ctx.isFiltered()) return;
    if (!ctx.isVariant()) return;
    if (SKIP_CHROMS.contains(variantChrom)) return;

    for (final MendelianViolationMetrics trio : trios) {
      final Genotype momGt = ctx.getGenotype(trio.MOTHER);
      final Genotype dadGt = ctx.getGenotype(trio.FATHER);
      final Genotype kidGt = ctx.getGenotype(trio.OFFSPRING);

      // if any genotype:
      // - has a non-snp allele; or
      // - lacks a reference allele
      //
      // then ignore this trio
      if (CollectionUtil.makeList(momGt, dadGt, kidGt)
          .stream()
          .anyMatch(
              gt ->
                  gt.isHetNonRef()
                      || Stream.concat(Stream.of(ctx.getReference()), gt.getAlleles().stream())
                          .anyMatch(a -> a.length() != 1 || a.isSymbolic()))) {
        continue;
      }

      // if between the trio there are more than 2 alleles including the reference, continue
      if (Stream.concat(
                  Collections.singleton(ctx.getReference()).stream(),
                  CollectionUtil.makeList(momGt, dadGt, kidGt)
                      .stream()
                      .flatMap(gt -> gt.getAlleles().stream()))
              .collect(Collectors.toSet())
              .size()
          > 2) continue;

      // Test to make sure:
      //   1) That the site is in fact variant in the trio
      //   2) that the offspring doesn't have a really wacky het allele balance
      if (!isVariant(momGt, dadGt, kidGt)) continue;
      if (kidGt.isHet()) {
        final int[] ad = kidGt.getAD();
        if (ad == null) continue;

        final List<Integer> adOfAlleles =
            kidGt
                .getAlleles()
                .stream()
                .map(a -> ad[ctx.getAlleleIndex(a)])
                .collect(Collectors.toList());
        final double minAlleleFraction =
            Math.min(adOfAlleles.get(0), adOfAlleles.get(1))
                / (double) (adOfAlleles.get(0) + adOfAlleles.get(1));
        if (minAlleleFraction < MIN_HET_FRACTION) continue;
      }

      ///////////////////////////////////////////////////////////////
      // Determine whether the offspring should be haploid at this
      // locus and which is the parental donor of the haploid genotype
      ///////////////////////////////////////////////////////////////
      boolean haploid = false;
      Genotype haploidParentalGenotype = null;

      if (FEMALE_CHROMS.contains(variantChrom) && trio.OFFSPRING_SEX != Sex.Unknown) {
        if (trio.OFFSPRING_SEX == Sex.Female) {
          // female, so diploid
          haploid = false;
        } else if (isInPseudoAutosomalRegion(variantChrom, variantPos)) {
          // male but in PAR on X, so diploid
          haploid = false;
        } else {
          // male, out of PAR on X, haploid
          haploid = true;
          haploidParentalGenotype = momGt;
        }
      }

      // the PAR on the male chromosome should be masked so that reads
      // align to the female chromosomes instead, so there's no point
      // of worrying about that here.

      if (MALE_CHROMS.contains(variantChrom)) {
        if (trio.OFFSPRING_SEX == Sex.Male) {
          haploid = true;
          haploidParentalGenotype = dadGt;
        } else {
          continue;
        }
      }

      // We only want to look at sites where we have high enough confidence that the genotypes
      // we are looking at are interesting. We want to ensure that parents are always GQ>=MIN_GQ,
      // and that the kid is either GQ>=MIN_GQ or, in the case where the kid is het, that the
      // phred-scaled likelihood of being reference is >=MIN_GQ.
      if (haploid
          && (haploidParentalGenotype.isNoCall() || haploidParentalGenotype.getGQ() < MIN_GQ))
        continue;
      if (!haploid
          && (momGt.isNoCall()
              || momGt.getGQ() < MIN_GQ
              || dadGt.isNoCall()
              || dadGt.getGQ() < MIN_GQ)) continue;
      if (kidGt.isNoCall()) continue;
      if (momGt.isHomRef() && dadGt.isHomRef() && !kidGt.isHomRef()) {
        if (kidGt.getPL()[0] < MIN_GQ) continue;
      } else if (kidGt.getGQ() < MIN_GQ) continue;

      // Also filter on the DP for each of the samples - it's possible to miss hets when DP is too
      // low
      if (haploid && (kidGt.getDP() < MIN_DP || haploidParentalGenotype.getDP() < MIN_DP)) continue;
      if (!haploid && (kidGt.getDP() < MIN_DP || momGt.getDP() < MIN_DP || dadGt.getDP() < MIN_DP))
        continue;

      trio.NUM_VARIANT_SITES++;

      ///////////////////////////////////////////////////////////////
      // First test for haploid violations
      ///////////////////////////////////////////////////////////////
      MendelianViolation type = null;
      if (haploid) {
        if (kidGt.isHet()) continue; // Should not see heterozygous calls at haploid regions

        if (!haploidParentalGenotype.getAlleles().contains(kidGt.getAllele(0))) {
          if (kidGt.isHomRef()) {
            type = MendelianViolation.Haploid_Other;
            trio.NUM_HAPLOID_OTHER++;
          } else {
            type = MendelianViolation.Haploid_Denovo;
            trio.NUM_HAPLOID_DENOVO++;
          }
        }
      }
      ///////////////////////////////////////////////////////////////
      // Then test for diploid mendelian violations
      ///////////////////////////////////////////////////////////////
      else if (isMendelianViolation(momGt, dadGt, kidGt)) {
        if (momGt.isHomRef() && dadGt.isHomRef() && !kidGt.isHomRef()) {
          trio.NUM_DIPLOID_DENOVO++;
          type = MendelianViolation.Diploid_Denovo;
        } else if (momGt.isHomVar() && dadGt.isHomVar() && kidGt.isHet()) {
          trio.NUM_HOMVAR_HOMVAR_HET++;
          type = MendelianViolation.HomVar_HomVar_Het;
        } else if (kidGt.isHom()
            && ((momGt.isHomRef() && dadGt.isHomVar()) || (momGt.isHomVar() && dadGt.isHomRef()))) {
          trio.NUM_HOMREF_HOMVAR_HOM++;
          type = MendelianViolation.HomRef_HomVar_Hom;
        } else if (kidGt.isHom()
            && ((momGt.isHom() && dadGt.isHet()) || (momGt.isHet() && dadGt.isHom()))) {
          trio.NUM_HOM_HET_HOM++;
          type = MendelianViolation.Hom_Het_Hom;
        } else {
          trio.NUM_OTHER++;
          type = MendelianViolation.Other;
        }
      }

      // Output a record into the family's violation VCF
      if (type != null) {
        // Create a new Context subsetted to the three samples
        final VariantContextBuilder builder = new VariantContextBuilder(ctx);
        builder.genotypes(
            ctx.getGenotypes()
                .subsetToSamples(CollectionUtil.makeSet(trio.MOTHER, trio.FATHER, trio.OFFSPRING)));
        builder.attribute(MENDELIAN_VIOLATION_KEY, type.name());

        // Copy over some useful attributes from the full context
        if (ctx.hasAttribute(VCFConstants.ALLELE_COUNT_KEY))
          builder.attribute(ORIGINAL_AC, ctx.getAttribute(VCFConstants.ALLELE_COUNT_KEY));
        if (ctx.hasAttribute(VCFConstants.ALLELE_FREQUENCY_KEY))
          builder.attribute(ORIGINAL_AF, ctx.getAttribute(VCFConstants.ALLELE_FREQUENCY_KEY));
        if (ctx.hasAttribute(VCFConstants.ALLELE_NUMBER_KEY))
          builder.attribute(ORIGINAL_AN, ctx.getAttribute(VCFConstants.ALLELE_NUMBER_KEY));

        // Write out the variant record
        familyToViolations.get(trio.FAMILY_ID).add(builder.make());
      }
    }
  }
Example #29
 @Override
 public Stream<? extends Child<Project>> stream() {
   return Stream.concat(
       dbmsChildren.stream().sorted(Nameable.COMPARATOR),
       pluginDataChildren.stream().sorted(Nameable.COMPARATOR));
 }
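Worth noting: Stream.concat preserves encounter order, so the result lists all dbmsChildren (sorted among themselves) before all pluginDataChildren (likewise sorted); the combined stream is not globally re-sorted.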
Example #30
  private void checkNexusFile(
      IRunnableDevice<ScanModel> scanner, List<ScanMetadata> scanMetadata, int... sizes)
      throws Exception {

    final ScanModel scanModel = ((AbstractRunnableDevice<ScanModel>) scanner).getModel();
    assertEquals(DeviceState.READY, scanner.getDeviceState());

    NXroot rootNode = getNexusRoot(scanner);
    NXentry entry = rootNode.getEntry();
    checkMetadata(entry, scanMetadata);
    // check that the scan points have been written correctly
    assertScanPointsGroup(entry, sizes);

    NXinstrument instrument = entry.getInstrument();

    LinkedHashMap<String, List<String>> signalFieldAxes = new LinkedHashMap<>();
    // axis for additional dimensions of a datafield, e.g. image
    signalFieldAxes.put(NXdetector.NX_DATA, Arrays.asList("real", "imaginary"));
    signalFieldAxes.put("spectrum", Arrays.asList("spectrum_axis"));
    signalFieldAxes.put("value", Collections.emptyList());

    String detectorName = scanModel.getDetectors().get(0).getName();
    NXdetector detector = instrument.getDetector(detectorName);
    // map of detector data field to name of nxData group where that field
    // is the @signal field
    Map<String, String> expectedDataGroupNames =
        signalFieldAxes
            .keySet()
            .stream()
            .collect(
                Collectors.toMap(
                    Function.identity(),
                    x -> detectorName + (x.equals(NXdetector.NX_DATA) ? "" : "_" + x)));

    // validate the main NXdata generated by the NexusDataBuilder
    Map<String, NXdata> nxDataGroups = entry.getChildren(NXdata.class);
    assertEquals(signalFieldAxes.size(), nxDataGroups.size());
    assertTrue(nxDataGroups.keySet().containsAll(expectedDataGroupNames.values()));
    for (String nxDataGroupName : nxDataGroups.keySet()) {
      NXdata nxData = entry.getData(nxDataGroupName);

      String sourceFieldName =
          nxDataGroupName.equals(detectorName)
              ? NXdetector.NX_DATA
              : nxDataGroupName.substring(nxDataGroupName.indexOf('_') + 1);
      assertSignal(nxData, sourceFieldName);
      // check the nxData's signal field is a link to the appropriate source data node of the
      // detector
      DataNode dataNode = detector.getDataNode(sourceFieldName);
      IDataset dataset = dataNode.getDataset().getSlice();
      assertSame(dataNode, nxData.getDataNode(sourceFieldName));
      assertTarget(
          nxData,
          sourceFieldName,
          rootNode,
          "/entry/instrument/" + detectorName + "/" + sourceFieldName);

      // check that the other primary data fields of the detector haven't been added to this NXdata
      for (String primaryDataFieldName : signalFieldAxes.keySet()) {
        if (!primaryDataFieldName.equals(sourceFieldName)) {
          assertNull(nxData.getDataNode(primaryDataFieldName));
        }
      }

      int[] shape = dataset.getShape();
      for (int i = 0; i < sizes.length; i++) assertEquals(sizes[i], shape[i]);

      // Make sure none of the numbers are NaNs. The detector
      // is expected to fill this scan with non-nulls.
      final PositionIterator it = new PositionIterator(shape);
      while (it.hasNext()) {
        int[] next = it.getPos();
        assertFalse(Double.isNaN(dataset.getDouble(next)));
      }

      // Check axes
      final IPosition pos = scanModel.getPositionIterable().iterator().next();
      final Collection<String> scannableNames = pos.getNames();

      // Append _value_set to each scannable name, then add the detector's axis fields to the result
      List<String> expectedAxesNames =
          Stream.concat(
                  scannableNames.stream().map(x -> x + "_value_set"),
                  signalFieldAxes.get(sourceFieldName).stream())
              .collect(Collectors.toList());
      assertAxes(nxData, expectedAxesNames.toArray(new String[expectedAxesNames.size()]));

      int[] defaultDimensionMappings = IntStream.range(0, sizes.length).toArray();
      int i = -1;
      for (String scannableName : scannableNames) {

        i++;
        NXpositioner positioner = instrument.getPositioner(scannableName);
        assertNotNull(positioner);

        dataNode = positioner.getDataNode("value_set");
        dataset = dataNode.getDataset().getSlice();
        shape = dataset.getShape();
        assertEquals(1, shape.length);
        assertEquals(sizes[i], shape[0]);

        String nxDataFieldName = scannableName + "_value_set";
        assertSame(dataNode, nxData.getDataNode(nxDataFieldName));
        assertIndices(nxData, nxDataFieldName, i);
        assertTarget(
            nxData, nxDataFieldName, rootNode, "/entry/instrument/" + scannableName + "/value_set");

        // Actual values: the NX_VALUE dataset should have the full scan shape
        dataNode = positioner.getDataNode(NXpositioner.NX_VALUE);
        dataset = dataNode.getDataset().getSlice();
        shape = dataset.getShape();
        assertArrayEquals(sizes, shape);

        nxDataFieldName = scannableName + "_" + NXpositioner.NX_VALUE;
        assertSame(dataNode, nxData.getDataNode(nxDataFieldName));
        assertIndices(nxData, nxDataFieldName, defaultDimensionMappings);
        assertTarget(
            nxData,
            nxDataFieldName,
            rootNode,
            "/entry/instrument/" + scannableName + "/" + NXpositioner.NX_VALUE);
      }
    }
  }