private static boolean checkDependants(
      final IdeaPluginDescriptor pluginDescriptor,
      final Function<PluginId, IdeaPluginDescriptor> pluginId2Descriptor,
      final Condition<PluginId> check,
      final Set<PluginId> processed) {
    processed.add(pluginDescriptor.getPluginId());
    final PluginId[] dependentPluginIds = pluginDescriptor.getDependentPluginIds();
    final Set<PluginId> optionalDependencies =
        new HashSet<PluginId>(Arrays.asList(pluginDescriptor.getOptionalDependentPluginIds()));
    for (final PluginId dependentPluginId : dependentPluginIds) {
      if (processed.contains(dependentPluginId)) continue;

      // TODO[yole] should this condition be a parameter?
      if (isModuleDependency(dependentPluginId)
          && (ourAvailableModules.isEmpty()
              || ourAvailableModules.contains(dependentPluginId.getIdString()))) {
        continue;
      }
      if (!optionalDependencies.contains(dependentPluginId)) {
        if (!check.value(dependentPluginId)) {
          return false;
        }
        final IdeaPluginDescriptor dependantPluginDescriptor =
            pluginId2Descriptor.fun(dependentPluginId);
        if (dependantPluginDescriptor != null
            && !checkDependants(dependantPluginDescriptor, pluginId2Descriptor, check, processed)) {
          return false;
        }
      }
    }
    return true;
  }
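
The method above walks a plugin's required (non-optional) dependencies recursively, using the processed set to skip nodes already visited so cyclic dependency graphs cannot recurse forever. A minimal, self-contained sketch of that visited-set pattern follows, with hypothetical names (not the IntelliJ API):

import java.util.*;
import java.util.function.Predicate;

// Minimal sketch of the visited-set recursion used above (hypothetical names, not the
// IntelliJ API): every required dependency, transitively, must satisfy the check, and
// nodes already in 'processed' are skipped so cyclic graphs cannot recurse forever.
final class DependencyCheck {
  static boolean allRequiredSatisfy(
      String node,
      Map<String, List<String>> requiredDeps,
      Predicate<String> check,
      Set<String> processed) {
    processed.add(node);
    for (String dep : requiredDeps.getOrDefault(node, Collections.<String>emptyList())) {
      if (processed.contains(dep)) continue;
      if (!check.test(dep)) return false;
      if (!allRequiredSatisfy(dep, requiredDeps, check, processed)) return false;
    }
    return true;
  }
}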
Example No. 2
  private void checkStartup(
      Map<String, ServiceData> map,
      List<ServiceData> start,
      ServiceData sd,
      Set<ServiceData> cyclic) {
    if (sd.after.isEmpty() || start.contains(sd)) return;

    if (cyclic.contains(sd)) {
      reporter.error("Cyclic dependency for " + sd.name);
      return;
    }

    cyclic.add(sd);

    for (String dependsOn : sd.after) {
      if (dependsOn.equals("boot")) continue;

      ServiceData deps = map.get(dependsOn);
      if (deps == null) {
        reporter.error("No such service " + dependsOn + " but " + sd.name + " depends on it");
      } else {
        checkStartup(map, start, deps, cyclic);
      }
    }
    start.add(sd);
  }
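
A minimal sketch of the post-order walk above, with hypothetical names: a service is appended to the start order only after everything it comes after, and a node encountered again before it has been ordered indicates a cycle.

import java.util.*;

// Minimal sketch (hypothetical names) of the dependency-ordered startup above.
final class StartupOrder {
  static void visit(
      String service,
      Map<String, List<String>> after,
      List<String> startOrder,
      Set<String> seen) {
    if (startOrder.contains(service)) return; // already ordered
    if (!seen.add(service)) {                 // seen but not yet ordered: cycle
      System.err.println("Cyclic dependency for " + service);
      return;
    }
    for (String dependsOn : after.getOrDefault(service, Collections.<String>emptyList())) {
      visit(dependsOn, after, startOrder, seen);
    }
    startOrder.add(service);
  }
}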
Example No. 3
 // look up and apply coarts for w to each sign in result
 @SuppressWarnings("unchecked")
 private void applyCoarts(Word w, SignHash result) throws LexException {
   List<Sign> inputSigns = new ArrayList<Sign>(result.asSignSet());
   result.clear();
   List<Sign> outputSigns = new ArrayList<Sign>(inputSigns.size());
   // for each surface attr, lookup coarts and apply to input signs, storing results in output
   // signs
   for (Iterator<Pair<String, String>> it = w.getSurfaceAttrValPairs(); it.hasNext(); ) {
     Pair<String, String> p = it.next();
     String attr = (String) p.a;
     if (!_indexedCoartAttrs.contains(attr)) continue;
     String val = (String) p.b;
     Word coartWord = Word.createWord(attr, val);
     SignHash coartResult = getSignsFromWord(coartWord, null, null, null);
     for (Iterator<Sign> it2 = coartResult.iterator(); it2.hasNext(); ) {
       Sign coartSign = it2.next();
       // apply to each input
       for (int j = 0; j < inputSigns.size(); j++) {
         Sign sign = inputSigns.get(j);
         grammar.rules.applyCoart(sign, coartSign, outputSigns);
       }
     }
     // switch output to input for next iteration
     inputSigns.clear();
     inputSigns.addAll(outputSigns);
     outputSigns.clear();
   }
   // add results back
   result.addAll(inputSigns);
 }
Example No. 4
  /**
   * Parse a repository document. The document may be a plain XML file or a zip archive
   * containing a repository.xml entry.
   *
   * @param url the URL of the repository document
   * @throws IOException if the document cannot be read
   * @throws XmlPullParserException if the document is not well-formed XML
   * @throws Exception on any other failure
   */
  void parseDocument(URL url) throws IOException, XmlPullParserException, Exception {
    if (!visited.contains(url)) {
      visited.add(url);
      try {
        System.out.println("Visiting: " + url);
        InputStream in = null;

        if (url.getPath().endsWith(".zip")) {
          ZipInputStream zin = new ZipInputStream(url.openStream());
          ZipEntry entry = zin.getNextEntry();
          while (entry != null) {
            if (entry.getName().equals("repository.xml")) {
              in = zin;
              break;
            }
            entry = zin.getNextEntry();
          }
        } else {
          in = url.openStream();
        }
        if (in == null) {
          // the zip archive did not contain a repository.xml entry
          System.out.println("No repository.xml found in " + url);
          return;
        }
        Reader reader = new InputStreamReader(in);
        XmlPullParser parser = new KXmlParser();
        parser.setInput(reader);
        parseRepository(parser);
      } catch (MalformedURLException e) {
        System.out.println("Cannot create connection to url");
      }
    }
  }
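
The zip branch above positions a ZipInputStream at the repository.xml entry before handing it to the parser. A self-contained sketch of that entry lookup (a hypothetical helper, not part of the original class):

import java.io.*;
import java.util.zip.*;

// Hypothetical helper illustrating the zip-entry lookup above: scan the archive until
// the named entry is reached and return the stream positioned at its data, or null if
// the archive does not contain it.
final class ZipEntryLookup {
  static InputStream open(InputStream raw, String entryName) throws IOException {
    ZipInputStream zin = new ZipInputStream(raw);
    for (ZipEntry entry = zin.getNextEntry(); entry != null; entry = zin.getNextEntry()) {
      if (entry.getName().equals(entryName)) {
        return zin; // reads the bytes of the matched entry
      }
    }
    zin.close();
    return null;
  }
}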
Example No. 5
  protected boolean ignoreFile(String file) {
    if (ignore_set.contains(file.toLowerCase())) {

      report("Torrent.create.progress.ignoringfile", " '" + file + "'");

      return (true);
    }

    return (false);
  }
Example No. 6
    public void handleStreamEvent(StreamEvent event) {
      if (event.eventType == StreamEvent.Type.STREAM_PREPARED) {
        SessionInfo session = ((StreamEvent.SessionPreparedEvent) event).session;
        sessionsByHost.put(session.peer, session);
      } else if (event.eventType == StreamEvent.Type.FILE_PROGRESS) {
        ProgressInfo progressInfo = ((StreamEvent.ProgressEvent) event).progress;

        // update progress
        Set<ProgressInfo> progresses = progressByHost.get(progressInfo.peer);
        if (progresses == null) {
          progresses = Sets.newSetFromMap(new ConcurrentHashMap<ProgressInfo, Boolean>());
          progressByHost.put(progressInfo.peer, progresses);
        }
        // replace any stale entry for the same file with the latest progress
        progresses.remove(progressInfo);
        progresses.add(progressInfo);

        StringBuilder sb = new StringBuilder();
        sb.append("\rprogress: ");

        long totalProgress = 0;
        long totalSize = 0;
        for (Map.Entry<InetAddress, Set<ProgressInfo>> entry : progressByHost.entrySet()) {
          SessionInfo session = sessionsByHost.get(entry.getKey());

          long size = session.getTotalSizeToSend();
          long current = 0;
          int completed = 0;
          for (ProgressInfo progress : entry.getValue()) {
            if (progress.currentBytes == progress.totalBytes) completed++;
            current += progress.currentBytes;
          }
          totalProgress += current;
          totalSize += size;
          sb.append("[").append(entry.getKey());
          sb.append(" ").append(completed).append("/").append(session.getTotalFilesToSend());
          sb.append(" (").append(size == 0 ? 100L : current * 100L / size).append("%)] ");
        }
        long time = System.nanoTime();
        long deltaTime = Math.max(1L, TimeUnit.NANOSECONDS.toMillis(time - lastTime));
        lastTime = time;
        long deltaProgress = totalProgress - lastProgress;
        lastProgress = totalProgress;

        sb.append("[total: ")
            .append(totalSize == 0 ? 100L : totalProgress * 100L / totalSize)
            .append("% - ");
        sb.append(mbPerSec(deltaProgress, deltaTime)).append("MB/s");
        sb.append(" (avg: ")
            .append(mbPerSec(totalProgress, TimeUnit.NANOSECONDS.toMillis(time - start)))
            .append("MB/s)]");

        System.out.print(sb.toString());
      }
    }
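
The handler reports throughput via mbPerSec(bytes, millis), whose definition is not shown here. A hedged sketch of what such a helper might look like (an assumption about its contract, not the original implementation):

// Hypothetical helper matching the mbPerSec(...) calls above; an assumption about its
// contract (bytes transferred and elapsed milliseconds in, megabytes per second out),
// not the original implementation.
final class Throughput {
  static long mbPerSec(long bytes, long elapsedMillis) {
    double bytesPerMilli = (double) bytes / Math.max(1L, elapsedMillis);
    return (long) (bytesPerMilli * 1000.0 / (1024.0 * 1024.0));
  }
}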
Example No. 7
  public static Statement replaceNamespace(
      Statement st, String o, String n, NodeFactory f, Map o2n, Set resourcesToIgnore)
      throws ModelException {

    boolean replaced = false;
    Resource subj = st.subject();
    Resource pred = st.predicate();
    RDFNode obj = st.object();

    if (obj instanceof Resource
        && !(obj instanceof Statement)
        && o.equals(((Resource) obj).getNamespace())
        && (resourcesToIgnore == null || !resourcesToIgnore.contains(obj))) {

      replaced = true;
      Resource r = f.createResource(n, ((Resource) obj).getLocalName());
      if (o2n != null) o2n.put(obj, r);
      obj = r;
    }

    if (o.equals(subj.getNamespace())
        && (resourcesToIgnore == null || !resourcesToIgnore.contains(subj))) {

      replaced = true;
      Resource r = f.createResource(n, subj.getLocalName());
      if (o2n != null) o2n.put(subj, r);
      subj = r;
    }

    if (o.equals(pred.getNamespace())
        && (resourcesToIgnore == null || !resourcesToIgnore.contains(pred))) {

      replaced = true;
      Resource r = f.createResource(n, pred.getLocalName());
      if (o2n != null) o2n.put(pred, r);
      pred = r;
    }
    return replaced ? f.createStatement(subj, pred, obj) : st;
  }
  private static String[] findStandardMBeans(URL codeBase) throws Exception {
    Set<String> names;
    if (codeBase.getProtocol().equalsIgnoreCase("file") && codeBase.toString().endsWith("/"))
      names = findStandardMBeansFromDir(codeBase);
    else names = findStandardMBeansFromJar(codeBase);

    Set<String> standardMBeanNames = new TreeSet<String>();
    for (String name : names) {
      if (name.endsWith("MBean")) {
        String prefix = name.substring(0, name.length() - 5);
        if (names.contains(prefix)) standardMBeanNames.add(prefix);
      }
    }
    return standardMBeanNames.toArray(new String[0]);
  }
    public void run() {
      super.run();

      // save the last set of active JVMs
      Set lastActiveVms = activeVms;

      try {
        // get the current set of active JVMs
        activeVms = (HashSet) vmManager.activeVms();

      } catch (MonitorException e) {
        // XXX: use logging api
        System.err.println("MonitoredHostProvider: polling task " + "caught MonitorException:");
        e.printStackTrace();

        // mark the HostManager as errored and notify listeners
        setLastException(e);
        fireDisconnectedEvents();
      }

      if (activeVms.isEmpty()) {
        return;
      }

      Set startedVms = new HashSet();
      Set terminatedVms = new HashSet();

      for (Iterator i = activeVms.iterator(); i.hasNext(); /* empty */ ) {
        Integer vmid = (Integer) i.next();
        if (!lastActiveVms.contains(vmid)) {
          // a new file has been detected, add to set
          startedVms.add(vmid);
        }
      }

      for (Iterator i = lastActiveVms.iterator(); i.hasNext(); /* empty */ ) {
        Object o = i.next();
        if (!activeVms.contains(o)) {
          // JVM has terminated, remove it from the active list
          terminatedVms.add(o);
        }
      }

      if (!startedVms.isEmpty() || !terminatedVms.isEmpty()) {
        fireVmStatusChangedEvents(activeVms, startedVms, terminatedVms);
      }
    }
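
The two loops above compute a set difference in each direction: JVMs present now but not before have started, and JVMs present before but not now have terminated. The same diff can be expressed directly with removeAll, as in this minimal sketch (hypothetical names):

import java.util.*;

// Minimal sketch (hypothetical names) of the two-way set diff above: 'started' is
// active-now minus active-before, 'terminated' is active-before minus active-now.
final class VmDiff {
  static <T> Set<T> difference(Set<T> from, Set<T> minus) {
    Set<T> result = new HashSet<T>(from);
    result.removeAll(minus);
    return result;
  }
}

For example, difference(activeVms, lastActiveVms) yields the started set, and difference(lastActiveVms, activeVms) the terminated one.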
Example No. 10
  /**
   * Writes the Graph's statements as RDF/XML to the print Writer.
   *
   * @param graph Graph whose statements are written
   * @param out PrintWriter to write the RDF/XML to
   * @throws IOException if writing fails
   * @throws GraphException if the graph cannot be read
   */
  protected void writeBody(Graph graph, PrintWriter out) throws IOException, GraphException {

    // validate
    if ((out != null) && (graph != null)) {

      // iterator used to access subjects
      ClosableIterator<Triple> subjectIter = graph.find(null, null, null);

      // write every (unique) subject
      if (subjectIter != null) {

        Set<SubjectNode> writtenSubjectSet = new HashSet<SubjectNode>();

        while (subjectIter.hasNext()) {

          // get the next triple
          Triple triple = subjectIter.next();

          if (triple != null) {

            SubjectNode subject = triple.getSubject();

            if (!writtenSubjectSet.contains(subject)) {
              this.writeSubject(graph, subject, out);
              writtenSubjectSet.add(subject);
            }
          }
        }

        // close the Iterator
        subjectIter.close();
      }
    } else {

      // message for exception to be thrown
      String message = "Could not write Graph. Invalid arguments provided. ";

      if (out == null) message += "Writer is null. ";
      if (graph == null) message += "Graph is null. ";

      throw new IllegalArgumentException(message);
    }
  }
Example No. 11
  /**
   * Handle the discovery of new peers.
   *
   * @param peers The list of peers discovered (from the announce response or any other means like
   *     DHT/PEX, etc.).
   * @param hexInfoHash The hex-encoded info hash of the torrent these peers were discovered for.
   */
  @Override
  public void handleDiscoveredPeers(List<Peer> peers, String hexInfoHash) {
    logger.info("Got {} peer(s) in tracker response", peers.size());

    if (!this.service.isAlive()) {
      logger.warn("Connection handler service is not available.");
      return;
    }

    Set<SharingPeer> foundPeers = new HashSet<SharingPeer>();
    for (Peer peer : peers) {
      SharingPeer match = this.getOrCreatePeer(peer, hexInfoHash);
      foundPeers.add(match);

      synchronized (match) {
        // Attempt to connect to the peer if and only if:
        //   - We're not already connected to it;
        //   - We're not a seeder (we leave the responsibility of connecting
        //     to peers that need to download something), or we are a seeder
        //     but we're still willing to initiate some outbound connections.
        if (match.isConnected() || this.isSeed(hexInfoHash) || match.getTorrent().isFinished()) {
          continue;
        }

        this.service.connect(match);
      }
    }

    List<SharingPeer> toRemove = new ArrayList<SharingPeer>();
    for (SharingPeer peer : this.peers) {
      if (peer.getTorrentHexInfoHash().equals(hexInfoHash)
          && !foundPeers.contains(peer)
          && !peer.isConnected()) {
        toRemove.add(peer);
      }
    }

    this.peers.removeAll(toRemove);
    for (SharingPeer peer : toRemove) {
      peer.unbind(true);
    }
  }
Example No. 12
  @RequestMapping(value = VIDEO_SEARCH_PATH, method = RequestMethod.GET)
  public @ResponseBody String[] searchVideo(
      @RequestParam(value = "username") String uName,
      @RequestParam(value = "video") String videoHash,
      HttpServletResponse response) {
    System.out.println("Search from:" + uName);
    if (!user_vidNameMap.containsKey(uName)) {
      response.setStatus(402); // client not connected
      return null;
    }

    Set<String> users = vidName_UserMap.get(videoHash);

    if (users == null) {
      System.out.println("Srearching main server\n");
      try {
        users = masterService.psSearch(hostAdder, videoHash);
      } catch (Exception e) {
        System.err.println(e.getMessage());
        return null;
      }
      if (users == null) return null;
      if (vidName_UserMap.containsKey(videoHash)) {
        vidName_UserMap.get(videoHash).addAll(users);
      } else {
        Set<String> s = new HashSet<String>();
        s.addAll(users);
        vidName_UserMap.put(videoHash, s);
      }
    } else {
      Iterator<String> it = users.iterator();
      while (it.hasNext()) {
        String temp = it.next();
        if (!activeUsers.contains(temp)) {
          it.remove();
        }
      }
    }
    System.out.println("Search result : " + Arrays.asList(users.toArray(new String[0])));
    // String [] a = new String[]
    return users.toArray(new String[0]);
  }
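
The else branch above prunes inactive users from the cached set while iterating; removal has to go through Iterator.remove to avoid a ConcurrentModificationException. A minimal standalone sketch of that idiom (hypothetical names):

import java.util.*;

// Minimal sketch (hypothetical names) of in-place pruning during iteration:
// Iterator.remove is the only safe way to drop elements from the set being iterated.
final class PruneInactive {
  static void retainActive(Set<String> users, Set<String> activeUsers) {
    for (Iterator<String> it = users.iterator(); it.hasNext(); ) {
      if (!activeUsers.contains(it.next())) {
        it.remove();
      }
    }
  }
}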
Example No. 13
 public void forall(Category c) {
   // get feature structures
   if (!(c instanceof AtomCat)) return;
   String type = ((AtomCat) c).getType();
   FeatureStructure fs = c.getFeatureStructure();
   GFeatStruc gfs = (GFeatStruc) fs;
   if (gfs == null || gfs.getInheritsFrom() == 0) return;
   int inhf = gfs.getInheritsFrom();
   FeatureStructure inhfFS = (FeatureStructure) featStrucMap.get(inhf);
   if (inhfFS != null) {
     // copy values of features from inhfFS not already present
     for (Iterator<String> it = inhfFS.getAttributes().iterator(); it.hasNext(); ) {
       String att = it.next();
       if (gfs.hasAttribute(att)) continue;
       gfs.setFeature(att, UnifyControl.copy(inhfFS.getValue(att)));
     }
     // for each possible attr used with this type and not already present,
     // add feature equation
     Collection<String> attrs = (Collection<String>) _catsToAttrs.get(type);
     if (attrs == null) return;
     for (Iterator<String> it = attrs.iterator(); it.hasNext(); ) {
       String att = it.next();
       if (gfs.hasAttribute(att)) continue;
       String varName = att.toUpperCase() + inhf;
       if (_lfAttrs.contains(att)) {
         gfs.setFeature(att, new HyloVar(varName));
         inhfFS.setFeature(att, new HyloVar(varName));
       } else {
         gfs.setFeature(att, new GFeatVar(varName));
         inhfFS.setFeature(att, new GFeatVar(varName));
       }
     }
   } else {
     System.err.println(
         "Warning: no feature structure with inheritsFrom index of "
             + inhf
             + " found in category "
             + c);
   }
 }
Example No. 14
 public static boolean isLocalEndPoint(EndPoint ep) {
   return (endPoints_.contains(ep));
 }
  public void saveIncludedBundleState() {
    if (fFeaureModeButton.getSelection()) {
      // Create a list of checked bundle infos
      List included = new ArrayList();
      int missingCount = 0;
      Object[] checked = fTree.getCheckedLeafElements();
      for (int i = 0; i < checked.length; i++) {
        if (checked[i] instanceof IFeatureModel) {
          included.add(
              new NameVersionDescriptor(
                  ((IFeatureModel) checked[i]).getFeature().getId(),
                  null,
                  NameVersionDescriptor.TYPE_FEATURE));
        }
        if (checked[i] instanceof IResolvedBundle) {
          // Missing features are included as IResolvedBundles, save them as features instead
          if (((IResolvedBundle) checked[i]).getStatus().getCode()
              == IResolvedBundle.STATUS_PLUGIN_DOES_NOT_EXIST) {
            included.add(
                new NameVersionDescriptor(
                    ((IResolvedBundle) checked[i]).getBundleInfo().getSymbolicName(),
                    null,
                    NameVersionDescriptor.TYPE_PLUGIN));
            missingCount++;
          } else if (((IResolvedBundle) checked[i]).getStatus().getCode()
              == IResolvedBundle.STATUS_FEATURE_DOES_NOT_EXIST) {
            included.add(
                new NameVersionDescriptor(
                    ((IResolvedBundle) checked[i]).getBundleInfo().getSymbolicName(),
                    null,
                    NameVersionDescriptor.TYPE_FEATURE));
            missingCount++;
          } else {
            included.add(
                new NameVersionDescriptor(
                    ((IResolvedBundle) checked[i]).getBundleInfo().getSymbolicName(), null));
          }
        }
      }

      if (included.size() == 0) {
        fTargetDefinition.setIncluded(new NameVersionDescriptor[0]);
      } else if (included.size() - missingCount
          == fTargetDefinition.getAllFeatures().length
              + ((TargetDefinition) fTargetDefinition).getOtherBundles().length) {
        fTargetDefinition.setIncluded(null);
      } else {
        fTargetDefinition.setIncluded(
            (NameVersionDescriptor[]) included.toArray(new NameVersionDescriptor[included.size()]));
      }
    } else {
      // Figure out if there are multiple bundles sharing the same id
      Set multi = new HashSet(); // BSNs of bundles with multiple versions available
      Set all = new HashSet();
      for (Iterator iterator = fAllBundles.iterator(); iterator.hasNext(); ) {
        IResolvedBundle rb = (IResolvedBundle) iterator.next();
        if (!all.add(rb.getBundleInfo().getSymbolicName())) {
          multi.add(rb.getBundleInfo().getSymbolicName());
        }
      }

      // Create a list of checked bundle infos
      List included = new ArrayList();
      Object[] checked = fTree.getCheckedLeafElements();
      for (int i = 0; i < checked.length; i++) {
        if (checked[i] instanceof IResolvedBundle) {
          // Create the bundle info object
          String bsn = ((IResolvedBundle) checked[i]).getBundleInfo().getSymbolicName();
          NameVersionDescriptor info = null;
          if (multi.contains(bsn)) {
            // include version info
            info =
                new NameVersionDescriptor(
                    bsn, ((IResolvedBundle) checked[i]).getBundleInfo().getVersion());
          } else {
            // don't store version info
            info = new NameVersionDescriptor(bsn, null);
          }
          included.add(info);
        }
      }

      if (included.size() == 0) {
        fTargetDefinition.setIncluded(new NameVersionDescriptor[0]);
      } else if (included.size() == fAllBundles.size() + fMissing.size()) {
        fTargetDefinition.setIncluded(null);
      } else {
        fTargetDefinition.setIncluded(
            (NameVersionDescriptor[]) included.toArray(new NameVersionDescriptor[included.size()]));
      }
    }
  }
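
The multi-version detection above relies on Set.add returning false for an element that is already present. A minimal sketch of that duplicate-detection idiom (hypothetical names):

import java.util.*;

// Minimal sketch (hypothetical names) of the duplicate-detection idiom above: names
// whose add(...) call returns false have been seen before and end up in 'multi'.
final class DuplicateNames {
  static Set<String> findDuplicates(Collection<String> symbolicNames) {
    Set<String> all = new HashSet<String>();
    Set<String> multi = new HashSet<String>();
    for (String name : symbolicNames) {
      if (!all.add(name)) {
        multi.add(name);
      }
    }
    return multi;
  }
}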
Example No. 16
  // given MorphItem
  private void getWithMorphItem(
      Word w, MorphItem mi, String targetPred, String targetRel, SignHash result)
      throws LexException {
    // get supertags for filtering, if a supertagger is installed
    Map<String, Double> supertags = null;
    Set<String> supertagsFound = null;
    if (_supertagger != null) {
      supertags = _supertagger.getSupertags();
      if (supertags != null) supertagsFound = new HashSet<String>(supertags.size());
    }

    // get macro adder
    MacroAdder macAdder = getMacAdder(mi);

    // if we have this stem in our lexicon
    String stem = mi.getWord().getStem();
    String pos = mi.getWord().getPOS();
    // for storing entries from explicitly listed family members
    Set<EntriesItem[]> explicitEntries = null;
    if (_stems.containsKey(stem + pos)) {
      explicitEntries = new HashSet<EntriesItem[]>();
      Collection<Object> stemItems = (Collection<Object>) _stems.get(stem + pos);
      for (Iterator<Object> I = stemItems.iterator(); I.hasNext(); ) {
        Object item = I.next();
        // see if it's an EntriesItem
        if (item instanceof EntriesItem) {
          EntriesItem entry = (EntriesItem) item;
          // do lookup
          getWithEntriesItem(
              w,
              mi,
              stem,
              stem,
              targetPred,
              targetRel,
              entry,
              macAdder,
              supertags,
              supertagsFound,
              result);
        }
        // otherwise it has to be a Pair containing a DataItem and
        // an EntriesItem[]
        else {
          @SuppressWarnings("rawtypes")
          DataItem dItem = (DataItem) ((Pair) item).a;
          @SuppressWarnings("rawtypes")
          EntriesItem[] entries = (EntriesItem[]) ((Pair) item).b;
          // store entries
          explicitEntries.add(entries);
          // do lookup
          getWithDataItem(
              w,
              mi,
              dItem,
              entries,
              targetPred,
              targetRel,
              macAdder,
              supertags,
              supertagsFound,
              result);
        }
      }
    }

    // for entries that are not explicitly in the lexicon file, we have to create
    // Signs from the open class entries with the appropriate part-of-speech
    Collection<EntriesItem[]> entrySets = (Collection<EntriesItem[]>) _posToEntries.get(pos);
    if (entrySets != null) {
      for (Iterator<EntriesItem[]> E = entrySets.iterator(); E.hasNext(); ) {
        EntriesItem[] entries = E.next();
        // skip if entries explicitly listed
        if (explicitEntries != null && explicitEntries.contains(entries)) continue;
        // otherwise get entries with pred = targetPred, or stem if null
        String pred = (targetPred != null) ? targetPred : stem;
        getWithDataItem(
            w,
            mi,
            new DataItem(stem, pred),
            entries,
            targetPred,
            targetRel,
            macAdder,
            supertags,
            supertagsFound,
            result);
      }
    }

    // finally do entries for any remaining supertags
    if (supertags != null) {
      for (String supertag : supertags.keySet()) {
        if (supertagsFound.contains(supertag)) continue;
        Set<EntriesItem> entries = _stagToEntries.get(supertag + pos);
        if (entries == null) continue; // nb: could be a POS mismatch
        // get entries with pred = targetPred, or stem if null
        String pred = (targetPred != null) ? targetPred : stem;
        for (EntriesItem entry : entries) {
          if (!entry.getStem().equals(DEFAULT_VAL)) continue;
          getWithEntriesItem(
              w,
              mi,
              stem,
              pred,
              targetPred,
              targetRel,
              entry,
              macAdder,
              supertags,
              supertagsFound,
              result);
        }
      }
    }
  }
  /**
   * Uses the target state to determine all bundles required by the currently checked bundles and
   * returns them so they can be checked in the tree.
   *
   * @param allBundles list of all bundles to search requirements in
   * @param checkedBundles list of bundles to get requirements for
   * @return list of resolved bundles from the collection to be checked
   */
  private Object[] getRequiredPlugins(final Collection allBundles, final Object[] checkedBundles) {
    final Set dependencies = new HashSet();
    IRunnableWithProgress op =
        new IRunnableWithProgress() {
          public void run(IProgressMonitor monitor) {
            try {
              monitor.beginTask(Messages.TargetContentsGroup_5, 150);

              // Get all the bundle locations
              List allLocations = new ArrayList(allBundles.size());
              for (Iterator iterator = allBundles.iterator(); iterator.hasNext(); ) {
                IResolvedBundle current = (IResolvedBundle) iterator.next();
                try {
                  // Some bundles, such as those with errors, may not have locations
                  URI location = current.getBundleInfo().getLocation();
                  if (location != null) {
                    allLocations.add(new File(location).toURL());
                  }
                } catch (MalformedURLException e) {
                  PDEPlugin.log(e);
                  monitor.setCanceled(true);
                  return;
                }
              }
              if (monitor.isCanceled()) {
                return;
              }
              monitor.worked(20);

              // Create a PDE State containing all of the target bundles
              PDEState state =
                  new PDEState(
                      (URL[]) allLocations.toArray(new URL[allLocations.size()]),
                      true,
                      new SubProgressMonitor(monitor, 50));
              if (monitor.isCanceled()) {
                return;
              }

              // Figure out which of the models have been checked
              IPluginModelBase[] models = state.getTargetModels();
              List checkedModels = new ArrayList(checkedBundles.length);
              for (int i = 0; i < checkedBundles.length; i++) {
                if (checkedBundles[i] instanceof IResolvedBundle) {
                  BundleInfo bundle = ((IResolvedBundle) checkedBundles[i]).getBundleInfo();
                  for (int j = 0; j < models.length; j++) {
                    if (models[j]
                            .getBundleDescription()
                            .getSymbolicName()
                            .equals(bundle.getSymbolicName())
                        && models[j]
                            .getBundleDescription()
                            .getVersion()
                            .toString()
                            .equals(bundle.getVersion())) {
                      checkedModels.add(models[j]);
                      break;
                    }
                  }
                }
              }
              monitor.worked(20);
              if (monitor.isCanceled()) {
                return;
              }

              // Get implicit dependencies as a list of strings
              // This is wasteful since the dependency calculation puts them back into BundleInfos
              NameVersionDescriptor[] implicitDependencies =
                  fTargetDefinition.getImplicitDependencies();
              List implicitIDs = new ArrayList();
              if (implicitDependencies != null) {
                for (int i = 0; i < implicitDependencies.length; i++) {
                  implicitIDs.add(implicitDependencies[i].getId());
                }
              }
              monitor.worked(10);

              // Get all dependency bundles
              // exclude "org.eclipse.ui.workbench.compatibility" - it is only needed for pre-3.0
              // bundles
              dependencies.addAll(
                  DependencyManager.getDependencies(
                      checkedModels.toArray(),
                      (String[]) implicitIDs.toArray(new String[implicitIDs.size()]),
                      state.getState(),
                      new String[] {"org.eclipse.ui.workbench.compatibility"})); // $NON-NLS-1$
              monitor.worked(50);

            } finally {
              monitor.done();
            }
          }
        };
    try {
      // Calculate the dependencies
      new ProgressMonitorDialog(fTree.getControl().getShell()).run(true, true, op);

      // We want to check the dependents, the source of the dependents, and the source of the
      // originally checked
      Set checkedNames = new HashSet(checkedBundles.length);
      for (int i = 0; i < checkedBundles.length; i++) {
        if (checkedBundles[i] instanceof IResolvedBundle) {
          checkedNames.add(((IResolvedBundle) checkedBundles[i]).getBundleInfo().getSymbolicName());
        }
      }

      List toCheck = new ArrayList();
      for (Iterator iterator = fAllBundles.iterator(); iterator.hasNext(); ) {
        IResolvedBundle bundle = (IResolvedBundle) iterator.next();
        if (bundle.isSourceBundle()) {
          String name = bundle.getSourceTarget().getSymbolicName();
          if (name != null && (dependencies.contains(name) || checkedNames.contains(name))) {
            toCheck.add(bundle);
          }
        } else if (dependencies.contains(bundle.getBundleInfo().getSymbolicName())) {
          toCheck.add(bundle);
        }
      }
      return toCheck.toArray();
    } catch (InvocationTargetException e) {
      PDEPlugin.log(e);
    } catch (InterruptedException e) {
    }

    return new Object[0];
  }