Example #1
 @Override
 public SetMultimap<Object, String> getContentAnchorages() {
   SetMultimap<Object, String> anchorages = HashMultimap.create();
   anchorages.put(getContent().getSource(), "START");
   anchorages.put(getContent().getTarget(), "END");
   return anchorages;
 }
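The examples in this catalog all center on SetMultimap.put(). As a reminder of the basic semantics, here is a minimal, self-contained sketch; it assumes only Guava on the classpath, and the keys and values are made up for illustration:

 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.SetMultimap;

 public class SetMultimapPutDemo {
   public static void main(String[] args) {
     SetMultimap<String, String> anchorages = HashMultimap.create();
     // put() returns true when the key/value pair is new ...
     System.out.println(anchorages.put("node-1", "START")); // true
     System.out.println(anchorages.put("node-1", "END"));   // true: same key, new value
     // ... and false when the exact pair is already present (set semantics)
     System.out.println(anchorages.put("node-1", "START")); // false
     // get() views all values associated with a key as a Set
     System.out.println(anchorages.get("node-1"));          // [START, END] (order not guaranteed)
   }
 }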
Example #2
  public ReverseCFATransformerFactory(Set<CFAEdge> cfa) {
    reverseCFA = HashMultimap.create();
    Set<Location> nonSinks = new HashSet<Location>();
    for (CFAEdge e : cfa) {
      reverseCFA.put(e.getTarget(), e);
      nonSinks.add(e.getSource());
    }

    FastSet<Location> sinks = new FastSet<Location>();
    for (Location l : reverseCFA.keySet()) {
      if (!nonSinks.contains(l)) {
        sinks.add(l);
      }
    }

    if (sinks.size() == 1) {
      sink = sinks.pick();
    } else if (sinks.size() == 0) {
      throw new RuntimeException("CFA has no sink!");
    } else {
      // Generate artificial exit node
      sink = new Location(new AbsoluteAddress(0xFFFFFF01L));
      for (Location l : sinks) {
        reverseCFA.put(sink, new CFAEdge(l, sink, new RTLSkip()));
      }
    }
  }
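The constructor above groups every edge under its target location and then treats any location that never occurs as a source as a sink. A stripped-down sketch of the same pattern, with the Jakstab types (CFAEdge, Location) replaced by hypothetical String nodes and plain source/target pairs:

 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.SetMultimap;
 import java.util.AbstractMap.SimpleEntry;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map.Entry;
 import java.util.Set;

 public class ReverseCfaSketch {
   public static void main(String[] args) {
     List<Entry<String, String>> cfa = List.of(
         new SimpleEntry<>("entry", "a"),
         new SimpleEntry<>("a", "exit1"),
         new SimpleEntry<>("a", "exit2"));

     SetMultimap<String, Entry<String, String>> reverseCfa = HashMultimap.create();
     Set<String> nonSinks = new HashSet<>();
     for (Entry<String, String> e : cfa) {
       reverseCfa.put(e.getValue(), e); // group incoming edges by their target
       nonSinks.add(e.getKey());        // every source has at least one outgoing edge
     }

     Set<String> sinks = new HashSet<>(reverseCfa.keySet());
     sinks.removeAll(nonSinks);         // targets that never occur as a source
     System.out.println(sinks);         // [exit1, exit2] (order may vary): more than one sink,
                                        // so the constructor would add an artificial exit node
   }
 }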
Example #3
 @Override
 public void addTrackedResources(Key intentKey, Collection<NetworkResource> resources) {
   for (NetworkResource resource : resources) {
     if (resource instanceof Link) {
       intentsByLink.put(linkKey((Link) resource), intentKey);
     } else if (resource instanceof ElementId) {
       intentsByDevice.put((ElementId) resource, intentKey);
     }
   }
 }
Example #4
  /**
   * Filters SchemaContext for yang modules
   *
   * @param delegate original SchemaContext
   * @param rootModules modules (yang schemas) to be available and all their dependencies (modules
   *     importing rootModule and whole chain of their imports)
   * @param additionalModuleIds (additional) modules (yang schemas) to be available and whole chain
   *     of their imports
   */
  public FilteringSchemaContextProxy(
      final SchemaContext delegate,
      final Collection<ModuleId> rootModules,
      final Set<ModuleId> additionalModuleIds) {

    Preconditions.checkArgument(rootModules != null, "Base modules cannot be null.");
    Preconditions.checkArgument(additionalModuleIds != null, "Additional modules cannot be null.");

    final Builder<Module> filteredModulesBuilder = new Builder<>();

    final SetMultimap<URI, Module> nsMap =
        Multimaps.newSetMultimap(new TreeMap<URI, Collection<Module>>(), MODULE_SET_SUPPLIER);
    final SetMultimap<String, Module> nameMap =
        Multimaps.newSetMultimap(new TreeMap<String, Collection<Module>>(), MODULE_SET_SUPPLIER);

    ImmutableMap.Builder<ModuleIdentifier, String> identifiersToSourcesBuilder =
        ImmutableMap.builder();

    // prepare a map of all modules that share a name but differ in revision
    final TreeMultimap<String, Module> nameToModulesAll = getStringModuleTreeMultimap();

    nameToModulesAll.putAll(getStringModuleMap(delegate));

    // in case there is a particular dependency to view filteredModules/yang models,
    // the dependency is checked for module name and imports
    processForRootModules(delegate, rootModules, filteredModulesBuilder);

    // adding additional modules
    processForAdditionalModules(delegate, additionalModuleIds, filteredModulesBuilder);

    filteredModulesBuilder.addAll(
        getImportedModules(
            Maps.uniqueIndex(delegate.getModules(), ModuleId.MODULE_TO_MODULE_ID),
            filteredModulesBuilder.build(),
            nameToModulesAll));

    /**
     * Instead of doing this on each invocation of getModules(), pre-compute it once and keep it
     * around -- better than the set we got in.
     */
    this.filteredModules = filteredModulesBuilder.build();

    for (final Module module : filteredModules) {
      nameMap.put(module.getName(), module);
      nsMap.put(module.getNamespace(), module);
      identifiersToSourcesBuilder.put(module, module.getSource());
    }

    namespaceToModules = ImmutableSetMultimap.copyOf(nsMap);
    nameToModules = ImmutableSetMultimap.copyOf(nameMap);
    identifiersToSources = identifiersToSourcesBuilder.build();
  }
Example #5
  @Override
  public synchronized void addSplits(PlanNodeId sourceId, Iterable<Split> splits) {
    try (SetThreadName ignored = new SetThreadName("HttpRemoteTask-%s", taskId)) {
      requireNonNull(sourceId, "sourceId is null");
      requireNonNull(splits, "splits is null");
      checkState(
          !noMoreSplits.contains(sourceId), "noMoreSplits has already been set for %s", sourceId);

      // only add pending split if not done
      if (!getTaskInfo().getState().isDone()) {
        int added = 0;
        for (Split split : splits) {
          if (pendingSplits.put(
              sourceId, new ScheduledSplit(nextSplitId.getAndIncrement(), split))) {
            added++;
          }
        }
        if (sourceId.equals(planFragment.getPartitionedSource())) {
          pendingSourceSplitCount += added;
          fireSplitCountChanged(added);
        }
        needsUpdate.set(true);
      }

      scheduleUpdate();
    }
  }
Example #6
 @VisibleForTesting
 public void addFreeformBlockFamily(BlockUri family, Iterable<String> categories) {
   freeformBlockUris.add(family);
   for (String category : categories) {
     categoryLookup.put(category, family);
   }
 }
Example #7
  /**
   * For each node in the graph, we want to find the longest path from the node to the root. The
   * length of the longest path is the depth at which the node should be drawn in the visualization
   * of the graph.
   */
  @VisibleForTesting
  static SetMultimap<Integer, String> calculateModuleDepths(
      String root, Map<String, List<String>> graph) {

    // To determine the longest path for each node, progressively descend
    // down the inverted dependency tree. Keep track of each module found at
    // that depth and record the deepest point at which the module was seen.
    SetMultimap<Integer, String> modulesAtDepth = HashMultimap.create();
    modulesAtDepth.put(0, root);
    Map<String, Integer> moduleToDepth = Maps.newHashMap();
    moduleToDepth.put(root, 0);

    int depth = 0;
    while (true) {
      Set<String> modules = modulesAtDepth.get(depth);
      if (modules.isEmpty()) {
        break;
      }
      int newDepth = ++depth;

      // For each module at the current depth, collect all of its descendants so
      // they can be inserted in modulesAtDepth at their new depth.
      Set<String> atNewDepth = Sets.newHashSet();
      for (String module : modules) {
        List<String> descendants = graph.get(module);
        for (String descendant : descendants) {
          atNewDepth.add(descendant);
        }
      }

      // A module in atNewDepth may already be in the modulesAtDepth multimap.
      // If so, then the key with which it is associated in the multimap must
      // be changed. The moduleToDepth map is used to keep track of where each
      // module is in the multimap for quick lookup, so moduleToDepth must be
      // kept up to date, as well.
      for (String module : atNewDepth) {
        if (moduleToDepth.containsKey(module)) {
          int oldDepth = moduleToDepth.remove(module);
          modulesAtDepth.remove(oldDepth, module);
        }
        moduleToDepth.put(module, newDepth);
        modulesAtDepth.put(newDepth, module);
      }
    }

    return modulesAtDepth;
  }
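A small usage sketch for the method above, with hypothetical module names; it assumes access to the package-private calculateModuleDepths, the usual java.util imports, and Guava's SetMultimap. The module "base" is reachable from the root at depth 1 and at depth 2, and the longer path wins:

 Map<String, List<String>> graph = new HashMap<>();
 graph.put("app", List.of("util", "base"));
 graph.put("util", List.of("base"));
 graph.put("base", List.of());

 SetMultimap<Integer, String> depths = calculateModuleDepths("app", graph);
 // depths == {0=[app], 1=[util], 2=[base]}: "base" is kept only at its deepest
 // level, because the longest path from the root to it goes through "util".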
Example #8
 /** @see SubscriptionContext#subscribeToMembers(Group, Sensor, SensorEventListener) */
 public <T> SubscriptionHandle subscribeToMembers(
     Group parent, Sensor<T> sensor, SensorEventListener<? super T> listener) {
   SubscriptionHandle handle = context.subscribeToMembers(parent, sensor, listener);
   synchronized (subscriptions) {
     subscriptions.put(parent, handle);
   }
   return handle;
 }
Example #9
 /**
  * @param family The block family to add.
  * @param andRegister Immediately registers the family - it is expected that the blocks have been
  *     given ids.
  */
 @VisibleForTesting
 public void addBlockFamily(BlockFamily family, boolean andRegister) {
   for (String category : family.getCategories()) {
     categoryLookup.put(category, family.getURI());
   }
   availableFamilies.put(family.getURI(), family);
   if (andRegister) {
     registerFamily(family);
   }
 }
Example #10
 public <T> SubscriptionHandle subscribe(
     Map<String, ?> flags,
     Entity producer,
     Sensor<T> sensor,
     SensorEventListener<? super T> listener) {
   SubscriptionHandle handle = context.subscribe(flags, producer, sensor, listener);
   synchronized (subscriptions) {
     subscriptions.put(producer, handle);
   }
   return handle;
 }
Example #11
  public static UUID register(BlockPosition bp, World world, String name, IConsoleAccess c) {
    synchronized (mutex) {
      if (unloadedWorlds.contains(world)) return null;
      UUID id = fabricUUID(bp, world, name);

      cs.put(id, c);
      worldMapping.put(world, c);

      return id;
    }
  }
Example #12
  /** Returns documents grouped by partitions. */
  SetMultimap<Object, Document> getDocumentsByPartition(List<Document> documents) {
    final SetMultimap<Object, Document> index = HashMultimap.create();
    for (Document document : documents) {
      final Collection<Object> partitions = document.getField(partitionIdFieldName);
      for (Object partition : partitions) {
        index.put(partition, document);
      }
    }

    return ImmutableSetMultimap.copyOf(index);
  }
Example #13
  // purchase product and return relevant product recommendations
  public List<Product> purchase(Customer customer, Product product) {
    purchasesCache.put(customer.id, product.id);

    ProductAssociativityGraph graph = productAssociativityGraphMap.get(product.category);
    Vertex v = Vertex.create(product.id);
    List<Vertex> associations = graph.getProductAssociations(v);

    int recommendSize = Math.min(associations.size(), maxNumRecommendations);
    return associations
        .stream()
        .map(vertex -> productCache.get(vertex.productId))
        .limit(recommendSize)
        .collect(Collectors.toList());
  }
Example #14
  protected SetMultimap<Integer, Integer> getNodePartitions(
      List<?> optNodes, List<?> optPartitions) {
    SetMultimap<Integer, Integer> nodePartitions = HashMultimap.create();

    if (optPartitions != null && optNodes != null) {
      for (Object node : optNodes) {
        for (Object partition : optPartitions)
          nodePartitions.put((Integer) node, (Integer) partition);
      }
    } else if (optPartitions != null) {
      for (Object partition : optPartitions) {
        for (Integer node : getNodes((Integer) partition)) {
          nodePartitions.put(node, (Integer) partition);
        }
      }
    } else if (optNodes != null) {
      for (Object node : optNodes) {
        nodePartitions.putAll((Integer) node, getPartitions((Integer) node));
      }
    } else throw new IllegalStateException();

    return nodePartitions;
  }
Example #15
 private void directRender(Screen screen, CanvasRenderer renderer, MapCanvas canvas) {
   setBase(canvas, buffer.buffer);
   renderer.render(screen, canvas);
   byte[] canvasBuffer = getBuffer(canvas);
   for (int i = 0; i < canvasBuffer.length; ++i) {
     if (canvasBuffer[i] >= 0) {
       MapPixel pixel = new MapPixel(new Point(i % 128, i / 128), canvasBuffer[i]);
       if (pixels.containsValue(pixel)) pixels.values().remove(pixel);
       pixels.put(renderer, pixel);
       buffer.buffer[i] = canvasBuffer[i];
     }
   }
   setBase(canvas, buffer.buffer);
   Arrays.fill(getBuffer(canvas), (byte) -1);
 }
Example #16
 ImmutableSet<Map<String, Object>> putRawNodesIfNotPresentAndStripMetaEntries(
     final Path buildFile,
     final ImmutableSet<Map<String, Object>> withoutMetaIncludes,
     final ImmutableSet<Path> dependentsOfEveryNode,
     ImmutableMap<String, ImmutableMap<String, Optional<String>>> configs) {
   try (AutoCloseableLock writeLock = rawAndComputedNodesLock.writeLock()) {
     ImmutableSet<Map<String, Object>> updated = allRawNodes.get(buildFile, withoutMetaIncludes);
     buildFileConfigs.put(buildFile, configs);
     if (updated == withoutMetaIncludes) {
       // We now know all the nodes. They all implicitly depend on everything in
       // the "dependentsOfEveryNode" set.
       for (Path dependent : dependentsOfEveryNode) {
         buildFileDependents.put(dependent, buildFile);
       }
     }
     return updated;
   }
 }
Example #17
  private static void test() {
    String customerKey = "customer";
    String productKey = "product";

    IntStream.range(0, 5)
        .forEach(
            index ->
                customerCache.put(
                    customerKey + index, new Customer(customerKey + index, "name" + index)));
    IntStream.range(0, 100)
        .forEach(
            index ->
                productCache.put(
                    productKey + index, new Product(productKey + index, "category" + (index % 5))));

    Random random = new Random();

    Set<String> productIds = new HashSet<>();
    int activeProductSize = productCache.size() / 10;

    for (int i = 0; i < 1000; i++) {
      Customer customer = customerCache.get(customerKey + random.nextInt(customerCache.size()));

      int randomSuffix = random.nextInt(activeProductSize);
      if (randomSuffix < 3) {
        randomSuffix = random.nextInt(productCache.size());
      }
      Product product = productCache.get(productKey + randomSuffix);
      if (!productIds.contains(product.id)) {
        purchasesCache.put(customer.id, product.id);
        productIds.add(product.id);
      }
    }
    createProductAssociativityGraphPerCategory();

    ProductRecommendationSystem recommendationSystem = new ProductRecommendationSystem(7);
    for (int i = 0; i < activeProductSize * 2; i++) {
      Customer customer = customerCache.get(customerKey + random.nextInt(customerCache.size()));
      Product product = productCache.get(productKey + random.nextInt(activeProductSize));

      List<Product> recommendations = recommendationSystem.purchase(customer, product);
      System.out.printf("%s%n", recommendations);
    }
  }
Example #18
  private Map<String, Collection<String>> scanValueRequirementBySecType(
      UniqueId portfolioId, ToolContext toolContext) {
    AvailableOutputsProvider availableOutputsProvider = toolContext.getAvaliableOutputsProvider();
    if (availableOutputsProvider == null) {
      throw new OpenGammaRuntimeException("AvailableOutputsProvider missing from ToolContext");
    }
    final SetMultimap<String, String> valueNamesBySecurityType = TreeMultimap.create();

    AvailableOutputs portfolioOutputs =
        availableOutputsProvider.getPortfolioOutputs(portfolioId, null);
    Set<String> securityTypes = portfolioOutputs.getSecurityTypes();
    for (String securityType : securityTypes) {
      Set<AvailableOutput> positionOutputs = portfolioOutputs.getPositionOutputs(securityType);
      for (AvailableOutput availableOutput : positionOutputs) {
        valueNamesBySecurityType.put(securityType, availableOutput.getValueName());
      }
    }
    return valueNamesBySecurityType.asMap();
  }
Example #19
 @Override
 public JsonElement serialize(
     BindsConfig src, Type typeOfSrc, JsonSerializationContext context) {
   JsonObject result = new JsonObject();
   SetMultimap<Name, SimpleUri> bindByModule = HashMultimap.create();
   for (SimpleUri key : src.data.keySet()) {
     bindByModule.put(key.getModuleName(), key);
   }
   List<Name> sortedModules = Lists.newArrayList(bindByModule.keySet());
   Collections.sort(sortedModules);
   for (Name moduleId : sortedModules) {
     SetMultimap<String, Input> moduleBinds = HashMultimap.create();
     for (SimpleUri bindUri : bindByModule.get(moduleId)) {
       moduleBinds.putAll(bindUri.getObjectName().toString(), src.data.get(bindUri));
     }
     JsonElement map = context.serialize(moduleBinds, SetMultimap.class);
     result.add(moduleId.toString(), map);
   }
   return result;
 }
Example #20
  public HttpRemoteTask(
      Session session,
      TaskId taskId,
      String nodeId,
      URI location,
      PlanFragment planFragment,
      Multimap<PlanNodeId, Split> initialSplits,
      OutputBuffers outputBuffers,
      HttpClient httpClient,
      Executor executor,
      ScheduledExecutorService errorScheduledExecutor,
      Duration minErrorDuration,
      Duration refreshMaxWait,
      JsonCodec<TaskInfo> taskInfoCodec,
      JsonCodec<TaskUpdateRequest> taskUpdateRequestCodec,
      SplitCountChangeListener splitCountChangeListener) {
    requireNonNull(session, "session is null");
    requireNonNull(taskId, "taskId is null");
    requireNonNull(nodeId, "nodeId is null");
    requireNonNull(location, "location is null");
    requireNonNull(planFragment, "planFragment is null");
    requireNonNull(outputBuffers, "outputBuffers is null");
    requireNonNull(httpClient, "httpClient is null");
    requireNonNull(executor, "executor is null");
    requireNonNull(taskInfoCodec, "taskInfoCodec is null");
    requireNonNull(taskUpdateRequestCodec, "taskUpdateRequestCodec is null");
    requireNonNull(splitCountChangeListener, "splitCountChangeListener is null");

    try (SetThreadName ignored = new SetThreadName("HttpRemoteTask-%s", taskId)) {
      this.taskId = taskId;
      this.session = session;
      this.nodeId = nodeId;
      this.planFragment = planFragment;
      this.outputBuffers.set(outputBuffers);
      this.httpClient = httpClient;
      this.executor = executor;
      this.errorScheduledExecutor = errorScheduledExecutor;
      this.taskInfoCodec = taskInfoCodec;
      this.taskUpdateRequestCodec = taskUpdateRequestCodec;
      this.updateErrorTracker =
          new RequestErrorTracker(
              taskId, location, minErrorDuration, errorScheduledExecutor, "updating task");
      this.getErrorTracker =
          new RequestErrorTracker(
              taskId, location, minErrorDuration, errorScheduledExecutor, "getting info for task");
      this.splitCountChangeListener = splitCountChangeListener;

      for (Entry<PlanNodeId, Split> entry :
          requireNonNull(initialSplits, "initialSplits is null").entries()) {
        ScheduledSplit scheduledSplit =
            new ScheduledSplit(nextSplitId.getAndIncrement(), entry.getValue());
        pendingSplits.put(entry.getKey(), scheduledSplit);
      }
      if (initialSplits.containsKey(planFragment.getPartitionedSource())) {
        pendingSourceSplitCount = initialSplits.get(planFragment.getPartitionedSource()).size();
        fireSplitCountChanged(pendingSourceSplitCount);
      }

      List<BufferInfo> bufferStates =
          outputBuffers
              .getBuffers()
              .keySet()
              .stream()
              .map(outputId -> new BufferInfo(outputId, false, 0, 0, PageBufferInfo.empty()))
              .collect(toImmutableList());

      TaskStats taskStats = new TaskStats(DateTime.now(), null);

      taskInfo =
          new StateMachine<>(
              "task " + taskId,
              executor,
              new TaskInfo(
                  taskId,
                  Optional.empty(),
                  TaskInfo.MIN_VERSION,
                  TaskState.PLANNED,
                  location,
                  DateTime.now(),
                  new SharedBufferInfo(BufferState.OPEN, true, true, 0, 0, 0, 0, bufferStates),
                  ImmutableSet.<PlanNodeId>of(),
                  taskStats,
                  ImmutableList.<ExecutionFailureInfo>of()));

      continuousTaskInfoFetcher = new ContinuousTaskInfoFetcher(refreshMaxWait);
    }
  }
Example #21
 public void addClosureMethod(Method method) {
   closureMethods.put(method.getName(), method);
 }
Example #22
 /**
  * Adds a leaf node to the network.
  *
  * @param networkNode Definition of the leaf node position and connecting sides.
  */
 public void addLeafNode(NetworkNode networkNode) {
   if (SANITY_CHECK && (!canAddLeafNode(networkNode) || isEmptyNetwork()))
     throw new IllegalStateException("Unable to add this node to network");
   leafNodes.put(networkNode.location, networkNode);
   distanceCache.clear();
 }
Example #23
 /** Adds a type which is implicitly imported into the current compilation unit. */
 public Builder addImplicitImport(TypeReference type) {
   implicitImports.put(type.getSimpleName(), type);
   return this;
 }
Example #24
 /**
  * Adds a networking node to the network.
  *
  * @param networkNode Definition of the networking node position and connecting sides.
  */
 public void addNetworkingNode(NetworkNode networkNode) {
   if (SANITY_CHECK && !canAddNetworkingNode(networkNode))
     throw new IllegalStateException("Unable to add this node to network");
   networkingNodes.put(networkNode.location, networkNode);
   distanceCache.clear();
 }
Example #25
  private ARGState relocateRefinementRoot(
      final ARGState pRefinementRoot, final boolean predicatePrecisionIsAvailable)
      throws InterruptedException {

    // No relocation is needed when only the value analysis is running,
    // because there it would only slightly degrade performance.
    // When running VA+PA, merging/covering and refinements of both CPAs
    // could lead to a state where two subsequent refinements find two
    // identical error paths through different parts of the ARG.
    // So when running VA+PA, the refinement root is set to the lowest
    // common ancestor of the states that are covered by the states in
    // the subtree of the original refinement root.
    if (!predicatePrecisionIsAvailable) {
      return pRefinementRoot;
    }

    // no relocation needed if restart at top
    if (restartStrategy == RestartStrategy.ROOT) {
      return pRefinementRoot;
    }

    Set<ARGState> descendants = pRefinementRoot.getSubgraph();
    Set<ARGState> coveredStates = new HashSet<>();
    shutdownNotifier.shutdownIfNecessary();
    for (ARGState descendant : descendants) {
      coveredStates.addAll(descendant.getCoveredByThis());
    }
    coveredStates.add(pRefinementRoot);

    // no relocation needed if set of descendants is closed under coverage
    if (descendants.containsAll(coveredStates)) {
      return pRefinementRoot;
    }

    Map<ARGState, ARGState> predecessorRelation = Maps.newHashMap();
    SetMultimap<ARGState, ARGState> successorRelation = LinkedHashMultimap.create();

    Deque<ARGState> todo = new ArrayDeque<>(coveredStates);
    ARGState coverageTreeRoot = null;

    // build the coverage tree, bottom-up, starting from the covered states
    while (!todo.isEmpty()) {
      shutdownNotifier.shutdownIfNecessary();
      final ARGState currentState = todo.removeFirst();

      if (currentState.getParents().iterator().hasNext()) {
        ARGState parentState = currentState.getParents().iterator().next();
        todo.add(parentState);
        predecessorRelation.put(currentState, parentState);
        successorRelation.put(parentState, currentState);

      } else if (coverageTreeRoot == null) {
        coverageTreeRoot = currentState;
      }
    }

    // starting from the root of the coverage tree,
    // the new refinement root is either the first node
    // having two or more children, or the original
    // refinement root, whichever comes first
    shutdownNotifier.shutdownIfNecessary();
    ARGState newRefinementRoot = coverageTreeRoot;
    while (successorRelation.get(newRefinementRoot).size() == 1
        && newRefinementRoot != pRefinementRoot) {
      newRefinementRoot = Iterables.getOnlyElement(successorRelation.get(newRefinementRoot));
    }

    rootRelocations.inc();
    return newRefinementRoot;
  }