Example #1
0
  /**
   * Creates the module manager for the given receptor: builds the module event objects,
   * initialises all bookkeeping collections, loads every flow-type module already known
   * to the receptor's linker, registers for configuration and linker callbacks, and
   * finally starts the background refresh thread.
   *
   * @param rec the receptor this manager belongs to
   */
  public ModuleManager(Receptor rec) {
    // Events fired when the module list changes / a module is added or removed.
    mlce = new ModuleListChangedEvent(rec);
    mae = new ModuleAddedEvent(rec);
    mre = new ModuleRemovedEvent(rec);

    recOut = rec.getOutComm();
    dbReportListeners = new LinkedList();
    listeners = new LinkedList();
    receptor = rec;
    modToHandle = new HashMap();
    hashToModule = new HashMap();
    handleToAnalMod = new HashMap();
    handles = new LinkedList();
    nameToFInst = new HashMap();

    // These sets/lists are touched from multiple threads, hence the synchronized wrappers.
    downloadSet = Collections.synchronizedSet(new HashSet());
    checkingSet = Collections.synchronizedSet(new HashSet());
    phantomModules = Collections.synchronizedList(new LinkedList());

    // Eagerly load every descriptor of flow type that the linker already knows about.
    ModuleLinker linker = receptor.getLinker();
    Iterator it = linker.getDescriptors().iterator();
    while (it.hasNext()) {
      ModuleDescriptor desc = (ModuleDescriptor) it.next();
      if (desc.getType() == Descriptor.TYPE_FLOW) {
        loadFlowModule(desc);
      }
    }

    refresh = Settings.v().getRefreshInterval();
    Settings.v().addConfigChangeListener(this);
    receptor.getLinker().addModuleLinkerListener(this);

    // NOTE(review): starting a thread from a constructor leaks a reference to a
    // partially constructed object; confirm no callback runs before construction ends.
    (refThread = new Thread(this, "Module Manager")).start();
  }
 /**
  * Copy constructor used during recovery: duplicates the relevant state of an existing
  * {@link ServiceInfo} so the service can be re-registered.
  *
  * @param info the service descriptor to duplicate; may be {@code null}, in which case
  *     only the empty address sets and a fresh state object are created
  */
 ServiceInfoImpl(ServiceInfo info) {
   this._ipv4Addresses = Collections.synchronizedSet(new LinkedHashSet<Inet4Address>());
   this._ipv6Addresses = Collections.synchronizedSet(new LinkedHashSet<Inet6Address>());
   if (info != null) {
     this._domain = info.getDomain();
     this._protocol = info.getProtocol();
     this._application = info.getApplication();
     this._name = info.getName();
     this._subtype = info.getSubtype();
     this._port = info.getPort();
     this._weight = info.getWeight();
     this._priority = info.getPriority();
     this._text = info.getTextBytes();
     this._persistent = info.isPersistent();
     // Copy every known address; Collections.addAll preserves the array order.
     Collections.addAll(this._ipv6Addresses, info.getInet6Addresses());
     Collections.addAll(this._ipv4Addresses, info.getInet4Addresses());
   }
   this._state = new ServiceInfoState(this);
 }
Example #3
0
  /**
   * Creates the manager: installs the observer-notifying "loaded" set, the pending set
   * and the observable hooks, then builds the filter-panel controller and validates
   * configured properties.
   */
  public FManager() {

    // NOTE(review): the anonymous set's add() synchronizes on loadedObserver, which is
    // only assigned a few lines below — safe as long as nothing adds to 'loaded' before
    // this constructor finishes; confirm no reference escapes earlier.
    loaded =
        Collections.synchronizedSet(
            new HashSet<File>() {
              @Override
              public boolean add(File e) {
                // Notify observers of every file added to the loaded set.
                synchronized (loadedObserver) {
                  loadedObserver.setChanged();
                  loadedObserver.notifyObservers(e);
                }
                return super.add(e);
              }
            });
    pending = Collections.synchronizedSet(new HashSet<>());
    filterChange = new InvokeObservable(this);
    loadedObserver = new InvokeObservable(this);
    activityObserver = new InvokeObservable(this);
    autoLoad = true;
    verbose = true;
    activity = IDLE;
    stdErr = Optional.empty();
    stdOut = Optional.empty();
    // Build the filter controller; on failure 'fpc' stays null and the error is logged.
    FPC fpcObject = null;
    try {
      fpcObject = new FPC(DataDetails.class);
      fpcObject.onFilterChange(this::setActiveFilter);
    } catch (Exception ex) {
      Logger.getLogger(FManager.class.getName()).log(Level.SEVERE, null, ex);
    }
    fpc = fpcObject;
    checkProperties();
  }
  /**
   * Creates a service descriptor from an already qualified name map plus the network
   * parameters. The map is validated/normalised via {@code checkQualifiedNameMap}
   * before any field is read from it.
   *
   * @param qualifiedNameMap map holding domain/protocol/application/instance/subtype
   * @param port service port
   * @param weight service weight
   * @param priority service priority
   * @param persistent whether the service info is persistent
   * @param text raw TXT record payload
   */
  ServiceInfoImpl(
      Map<Fields, String> qualifiedNameMap,
      int port,
      int weight,
      int priority,
      boolean persistent,
      byte[] text) {
    Map<Fields, String> checkedMap = ServiceInfoImpl.checkQualifiedNameMap(qualifiedNameMap);

    this._domain = checkedMap.get(Fields.Domain);
    this._protocol = checkedMap.get(Fields.Protocol);
    this._application = checkedMap.get(Fields.Application);
    this._name = checkedMap.get(Fields.Instance);
    this._subtype = checkedMap.get(Fields.Subtype);

    this._port = port;
    this._weight = weight;
    this._priority = priority;
    this._text = text;
    this.setNeedTextAnnouncing(false);
    this._state = new ServiceInfoState(this);
    this._persistent = persistent;
    this._ipv4Addresses = Collections.synchronizedSet(new LinkedHashSet<Inet4Address>());
    this._ipv6Addresses = Collections.synchronizedSet(new LinkedHashSet<Inet6Address>());
  }
 /**
  * Returns the set of IN-edge ids for the given label, lazily restoring it from the
  * stored vertex property on first access and caching it in the in-edges map.
  *
  * @param label the edge label whose id set is requested
  * @return a synchronized set of edge ids; may be {@code null} if the stored property
  *     had an unexpected type (see the WARNING branch below)
  */
 Set<String> getInEdgesSet(final String label) {
   Set<String> edgeIds = getInEdgesMap().get(label);
   if (edgeIds == null) {
     // Cache miss: try to restore the id collection from the persisted property.
     Object o = getProperty(DominoVertex.IN_PREFIX + label, java.util.Collection.class);
     if (o != null) {
       if (o instanceof LinkedHashSet) {
         // NOTE(review): raw-type cast — assumes the stored LinkedHashSet holds Strings.
         edgeIds = Collections.synchronizedSet((LinkedHashSet) o);
       } else if (o instanceof java.util.Collection) {
         edgeIds = Collections.synchronizedSet(new LinkedHashSet<String>((Collection<String>) o));
       } else {
         // Unexpected property type: log loudly. edgeIds stays null in this branch
         // and null is then cached and returned — callers must tolerate that.
         log_.log(
             Level.WARNING,
             "ALERT! InEdges for label "
                 + label
                 + " returned something other than a Collection "
                 + o.getClass().getName()
                 + " in vertex type "
                 + this.getForm()
                 + " id: "
                 + getId());
       }
     } else {
       edgeIds = Collections.synchronizedSet(new LinkedHashSet<String>());
     }
     // Publish the restored set under the map's own lock.
     Map map = getInEdgesMap();
     synchronized (map) {
       map.put(label, edgeIds);
     }
   }
   return edgeIds;
 }
  /**
   * Returns the OUT-edge objects of this vertex, optionally filtered by label.
   *
   * <p>With no labels the union over all OUT-edge labels is returned. With a single
   * label the per-label cache is consulted first and populated on a miss. With several
   * labels the per-label results are merged. The returned set is always unmodifiable.
   *
   * @param labels edge labels to fetch; {@code null}/empty means "all labels"
   * @return an unmodifiable set of matching OUT edges (never null, possibly empty)
   */
  protected Set<Edge> getOutEdgeObjects(final String... labels) {
    Map<String, Set<Edge>> outCache = getOutEdgeCache();
    Set<Edge> result = null;

    if (labels == null || labels.length == 0) {
      // Union of the per-label results across every known OUT label.
      result = new LinkedHashSet<Edge>();
      Set<String> labelSet = this.getOutEdgeLabels();
      for (String label : labelSet) {
        result.addAll(getOutEdgeObjects(label));
      }
    } else if (labels.length == 1) {
      String label = labels[0];
      if (label == null) {
        return Collections.unmodifiableSet(getOutEdgeObjects());
      }
      synchronized (outCache) {
        result = outCache.get(label);
      }
      if (result == null) {
        Set<String> edgeIds = getOutEdgesSet(label);
        Set<Edge> edges = getParent().getEdgesFromIds(edgeIds);
        if (edges != null) {
          // NOTE(review): raw cast assumes getEdgesFromIds returns a LinkedHashSet;
          // confirm against the parent implementation.
          result = Collections.synchronizedSet((LinkedHashSet) edges);
        } else {
          // BUGFIX: previously a null result was cached here and then handed to
          // Collections.unmodifiableSet below, throwing NullPointerException.
          result = Collections.synchronizedSet(new LinkedHashSet<Edge>());
        }
        synchronized (outCache) {
          outCache.put(label, result);
        }
      }
    } else {
      // Multiple explicit labels: merge the per-label (cached) results.
      result = Collections.synchronizedSet(new LinkedHashSet<Edge>());
      for (String label : labels) {
        result.addAll(getOutEdgeObjects(label));
      }
    }
    return Collections.unmodifiableSet(result);
  }
 /**
  * Returns (and records) the linked folder for the given external source archive,
  * creating a uniquely named folder handle in the archives project if the path is not
  * yet known. Optionally marks the archive as pending creation.
  *
  * @param externalSourceArchivePath path of the external source archive
  * @param externalSourceArchivesProject project hosting the linked folders
  * @param scheduleForCreation whether to remember the path for deferred folder creation
  * @return the (possibly pre-existing) folder handle for the archive
  */
 private IFolder addSourceArchive(
     IPath externalSourceArchivePath,
     IProject externalSourceArchivesProject,
     boolean scheduleForCreation) {
   Map<IPath, IResource> knownSourceArchives = getSourceArchives();
   Object existing = knownSourceArchives.get(externalSourceArchivePath);
   if (existing != null) {
     return (IFolder) existing;
   }
   // Find a folder name that is not taken yet: "<prefix>[<n>-]<lastSegment>".
   IFolder result;
   int counter = 0;
   do {
     StringBuilder folderName = new StringBuilder(LINKED_FOLDER_NAME);
     if (counter > 0) {
       folderName.append(counter).append('-');
     }
     folderName.append(externalSourceArchivePath.lastSegment());
     result = externalSourceArchivesProject.getFolder(folderName.toString());
     counter++;
   } while (result.exists());
   if (scheduleForCreation) {
     // BUGFIX: the add() used to happen outside this synchronized block, so a second
     // thread could observe a stale null field (unsafe publication) or miss the add.
     // Initializing and adding under the same lock makes the lazy init race-free.
     synchronized (this) {
       if (pendingSourceArchives == null)
         pendingSourceArchives = Collections.synchronizedSet(new HashSet<IPath>());
       pendingSourceArchives.add(externalSourceArchivePath);
     }
   }
   knownSourceArchives.put(externalSourceArchivePath, result);
   return result;
 }
  /**
   * Creates an instance of an application object.
   *
   * <p>Besides wiring up the debugger bookkeeping (object store, event handler, event
   * request specs and command handlers), this constructor installs a suspend-all class
   * prepare request so pending specs can be resolved as classes load.
   *
   * @param jdebug {@link Jdebug} class
   * @param appID Identifier used to specify this app int commands.
   * @param vm Virtual machine in which this application is running.
   */
  public Application(Jdebug jdebug, Integer appID, VirtualMachine vm) {

    this.jdebug = jdebug;

    this.appID = appID;

    this.vm = vm;

    // Commands issued but not yet acknowledged; accessed from multiple threads.
    pendingCommands = Collections.synchronizedSet(new HashSet());

    store = new ObjectStore(this);

    eventHandler = new EventHandler(this);

    eventRequestSpecs = new EventRequestSpecList(this);

    // we need to raise all class prepare events to
    // make sure we resolve the corresponding specs.
    ClassPrepareRequest cprequest = vm.eventRequestManager().createClassPrepareRequest();

    // this (hack?) is used to identify if the user itself specified
    // a class prepare request, or the event was raised because of
    // this request.
    cprequest.putProperty("default", "default");
    cprequest.setSuspendPolicy(EventRequest.SUSPEND_ALL);
    cprequest.enable();

    // set up command handling classes.
    generalCommands = new GeneralCommands(this, store);
    specCommands = new SpecCommands(this, eventRequestSpecs, store);
    threadCommands = new ThreadCommands(this, store);
  }
  /**
   * Verifies shutdown close semantics: closeAll(true) must close only file systems with
   * automatic close enabled, while a plain closeAll() then closes the remaining one.
   *
   * @throws Exception if file system setup or the close calls fail
   */
  public void testFsShutdownHook() throws Exception {
    // Both test file systems report every close into this shared set.
    final Set<FileSystem> closed = Collections.synchronizedSet(new HashSet<FileSystem>());
    Configuration conf = new Configuration();
    Configuration confNoAuto = new Configuration();

    conf.setClass("fs.test.impl", TestShutdownFileSystem.class, FileSystem.class);
    confNoAuto.setClass("fs.test.impl", TestShutdownFileSystem.class, FileSystem.class);
    // Second config opts out of automatic close.
    confNoAuto.setBoolean("fs.automatic.close", false);

    TestShutdownFileSystem fsWithAuto =
        (TestShutdownFileSystem) (new Path("test://a/").getFileSystem(conf));
    TestShutdownFileSystem fsWithoutAuto =
        (TestShutdownFileSystem) (new Path("test://b/").getFileSystem(confNoAuto));

    fsWithAuto.setClosedSet(closed);
    fsWithoutAuto.setClosedSet(closed);

    // Different URIs should result in different FS instances
    assertNotSame(fsWithAuto, fsWithoutAuto);

    // Simulated shutdown: only the auto-close FS must be closed.
    FileSystem.CACHE.closeAll(true);
    assertEquals(1, closed.size());
    assertTrue(closed.contains(fsWithAuto));

    closed.clear();

    // Explicit closeAll() closes whatever is left.
    FileSystem.closeAll();
    assertEquals(1, closed.size());
    assertTrue(closed.contains(fsWithoutAuto));
  }
Example #10
0
  /**
   * Builds the daemonic parser state: node/target caches sized for the requested
   * parallelism, the build-file-tree loading cache, dependency bookkeeping maps and the
   * lock guarding the cached state.
   *
   * @param typeCoercerFactory factory used to coerce raw attribute values
   * @param marshaller marshaller for constructor arguments
   * @param parsingThreads parallelism hint used to size the optimistic caches
   */
  public DaemonicParserState(
      TypeCoercerFactory typeCoercerFactory,
      ConstructorArgMarshaller marshaller,
      int parsingThreads) {
    this.typeCoercerFactory = typeCoercerFactory;
    this.marshaller = marshaller;
    this.allRawNodes = new OptimisticLoadingCache<>(parsingThreads);
    this.targetsCornucopia = HashMultimap.create();
    this.allTargetNodes = new OptimisticLoadingCache<>(parsingThreads);
    // Predicate form of hasCachedTargetNodeForBuildTarget, for callers wanting a Guava Predicate.
    this.hasCachedTargetNodeForBuildTargetPredicate =
        new Predicate<BuildTarget>() {
          @Override
          public boolean apply(BuildTarget buildTarget) {
            return hasCachedTargetNodeForBuildTarget(buildTarget);
          }
        };
    // Lazily builds one filesystem-backed build file tree per cell.
    this.buildFileTrees =
        CacheBuilder.newBuilder()
            .build(
                new CacheLoader<Cell, BuildFileTree>() {
                  @Override
                  public BuildFileTree load(Cell cell) throws Exception {
                    return new FilesystemBackedBuildFileTree(
                        cell.getFilesystem(), cell.getBuildFileName());
                  }
                });
    this.buildFileDependents = HashMultimap.create();
    this.cachedEnvironment = ImmutableMap.of();
    this.cachedIncludes = new ConcurrentHashMap<>();
    // Cells seen so far; synchronized since parser threads register cells concurrently.
    this.knownCells = Collections.synchronizedSet(new HashSet<Cell>());

    this.cachedStateLock = new AutoCloseableReadWriteUpdateLock();
  }
Example #11
0
/**
 * In-memory store of job applications. The backing set is synchronized; bulk reads
 * additionally lock it manually while iterating.
 */
public class ApplicationRepository {
  private final Set<Application> appSet = Collections.synchronizedSet(new HashSet<Application>());

  /** Registers an application in the repository. */
  public void addApplication(Application app) {
    appSet.add(app);
  }

  /** Returns every application submitted for the given job posting. */
  public Applications getApplicationsByJobPosting(JobPosting jobPosting) {
    return getApplications(new ApplicationFilter().byJobPosting(jobPosting));
  }

  /** Collects every stored application accepted by the given filter. */
  Applications getApplications(ApplicationFilter filter) {
    Applications matching = new Applications();
    // Iterating a synchronizedSet requires holding its lock manually.
    synchronized (appSet) {
      for (Application candidate : appSet) {
        if (filter.pass(candidate)) {
          matching.add(candidate);
        }
      }
    }
    return matching;
  }
}
Example #12
0
@ServerEndpoint("/chatendpoint")
public class ChatEndPoint {

  private static Set<Session> wsSessions = Collections.synchronizedSet(new HashSet<Session>());

  /**
   * Sends a message to all sessions connected to this endpoint.
   *
   * @param message the text to broadcast
   * @throws IOException if sending fails
   * @throws EncodeException if the message cannot be encoded
   */
  @OnMessage
  public void onMessage(String message) throws IOException, EncodeException {
    // BUGFIX: iterating a Collections.synchronizedSet requires manual synchronization;
    // a concurrent onOpen/onClose could otherwise cause ConcurrentModificationException.
    // (A CopyOnWriteArraySet would avoid holding the lock during I/O — worth considering.)
    synchronized (wsSessions) {
      for (Session wsSession : wsSessions) {
        wsSession.getBasicRemote().sendObject(message);
      }
    }
  }

  /** Adds the session to the session set when a user connects. */
  @OnOpen
  public void onOpen(Session wsSession) {
    wsSessions.add(wsSession);
  }

  /** Removes the session from the session set when the connection is closed. */
  @OnClose
  public void onClose(Session wsSession) {
    wsSessions.remove(wsSession);
  }
}
  /**
   * Test usage from multiple threads.
   *
   * <p>Starts three runner threads sharing one thread-localized Touchable; each runner
   * records into the shared list/set. Expects six list entries but only three distinct
   * touchables, proving each thread saw its own instance.
   *
   * @throws InterruptedException if interrupted
   */
  public final void testInstancesUsedFromMultipleThreads() throws InterruptedException {
    final Set<Touchable> set = Collections.synchronizedSet(new HashSet<Touchable>());
    final List<Touchable> list = Collections.synchronizedList(new ArrayList<Touchable>());
    // Adapter under test: wraps a constructor injector in a thread-localizing proxy.
    final ComponentAdapter componentAdapter =
        new ThreadLocalizing.ThreadLocalized(
            new ConstructorInjection.ConstructorInjector(
                Touchable.class, SimpleTouchable.class, null));
    final Touchable touchable =
        (Touchable) componentAdapter.getComponentInstance(null, ComponentAdapter.NOTHING.class);

    final Thread[] threads = {
      new Thread(new Runner(touchable, list, set), "junit-1"),
      new Thread(new Runner(touchable, list, set), "junit-2"),
      new Thread(new Runner(touchable, list, set), "junit-3"),
    };
    for (int i = threads.length; i-- > 0; ) {
      threads[i].start();
    }
    // NOTE(review): fixed sleeps make this timing-sensitive; presumably each Runner
    // records once, wait()s on its thread, records again after notifyAll — confirm
    // against Runner's implementation.
    Thread.sleep(300);
    for (int i = threads.length; i-- > 0; ) {
      synchronized (threads[i]) {
        threads[i].notifyAll();
      }
    }
    Thread.sleep(300);
    for (int i = threads.length; i-- > 0; ) {
      threads[i].interrupt();
    }
    Thread.sleep(300);
    // Two recordings per thread, but only three distinct thread-local instances.
    assertEquals(6, list.size());
    assertEquals(3, set.size());
  }
  /**
   * Checks the cue sheet's track titles: fails immediately when a track has no title,
   * and reports failure (after scanning all tracks) when any title occurs twice.
   *
   * @param cs the cue sheet to validate
   * @return {@code true} when every track has a unique, non-null title
   */
  @Override
  public final synchronized boolean check(CueSheet cs) {
    boolean Ret = true;
    try {
      // タイトルの重複を検査するためのマップ。 (set used to detect duplicate titles)
      Set<String> testSet = Collections.synchronizedSet(new HashSet<>());
      for (TrackData TD : cs.getAllTrackData()) {
        if (TD.getTitle() == null) {
          log.info("タイトルがセットされていません。");
          return false;
        }
        final String s = TD.getTitle();
        // Set.add returns false when the element was already present, replacing the
        // former contains()==false check; the no-op "Ret = Ret & true" was dropped.
        if (testSet.add(s)) {
          log.info("タイトルは重複していません。 {}", TD.getTitle());
          log.info("正常");
        } else {
          log.info("タイトルが重複しています。  {}", TD.getTitle());
          Ret = false;
        }
      }
      log.info("判定結果  {}", Ret);
      return Ret;
    } catch (Exception e) {
      log.error(" エラーです。", e);
      return false;
    }
  }
 /**
  * Returns the mutable task set registered under the given tag, creating and
  * registering an empty synchronized set on first use.
  *
  * @param tag grouping key; a null tag is tolerated but logged
  * @return the live (synchronized) set of tasks for {@code tag}
  */
 private Set<Task> getMutableTasksWithTag(Object tag) {
   if (tag == null) {
     // NOTE(review): debug leftover — consider a real logger or rejecting null tags.
     System.out.println("argph, null");
   }
   // PERF FIX: the old code called putIfAbsent unconditionally, allocating a fresh
   // synchronized set on every lookup even when the tag already had one. Only create
   // the set on a miss; putIfAbsent still makes the insert race-safe, and the follow-up
   // get returns whichever set won a concurrent race.
   Set<Task> tasks = tasksByTag.get(tag);
   if (tasks == null) {
     tasksByTag.putIfAbsent(tag, Collections.synchronizedSet(new LinkedHashSet<Task>()));
     tasks = tasksByTag.get(tag);
   }
   return tasks;
 }
Example #16
0
 /**
  * Fetches data for the given primary keys.
  *
  * <p>Runs repeated passes over the outstanding keys, executing each one as soon as its
  * prerequisites (tracked in {@code pksNotDone}) allow. If a full pass completes
  * without progress the dependency graph is unsatisfiable and a RuntimeException is
  * thrown. Timing of the prefetch work is accumulated into the two timeSpent counters.
  *
  * @param pksToDo a Map of the primary keys to fetch
  * @param results a Map to hold results that are to be added to the cache
  * @param cldToObjectsForCld a Map of Lists of objects relevant to PrimaryKeys
  * @param time1 the time that processing started
  * @throws ObjectStoreException if something goes wrong
  */
 protected void doPks(
     Map<PrimaryKey, ClassDescriptor> pksToDo,
     Map<InterMineObject, Set<InterMineObject>> results,
     Map<ClassDescriptor, List<InterMineObject>> cldToObjectsForCld,
     long time1)
     throws ObjectStoreException {
   // Ids fetched across all doPk calls; synchronized because doPk may use worker threads
   // — NOTE(review): confirm doPk's threading; the wrapper suggests concurrent writes.
   Set<Integer> fetchedObjectIds = Collections.synchronizedSet(new HashSet<Integer>());
   // Identity map: PrimaryKey equality is identity-based here.
   Map<PrimaryKey, ClassDescriptor> pksNotDone =
       new IdentityHashMap<PrimaryKey, ClassDescriptor>(pksToDo);
   while (!pksToDo.isEmpty()) {
     int startPksToDoSize = pksToDo.size();
     Iterator<PrimaryKey> pkIter = pksToDo.keySet().iterator();
     while (pkIter.hasNext()) {
       PrimaryKey pk = pkIter.next();
       ClassDescriptor cld = pksToDo.get(pk);
       if (canDoPkNow(pk, cld, pksNotDone)) {
         // LOG.error("Running pk " + cld.getName() + "." + pk.getName());
         doPk(pk, cld, results, cldToObjectsForCld.get(cld), fetchedObjectIds);
         pkIter.remove();
         pksNotDone.remove(pk);
       } else {
         // LOG.error("Cannot do pk " + cld.getName() + "." + pk.getName() + " yet");
       }
     }
     // No key became runnable during an entire pass: circular/unsatisfiable dependencies.
     if (pksToDo.size() == startPksToDoSize) {
       throw new RuntimeException("Error - cannot fetch any pks: " + pksToDo.keySet());
     }
   }
   long time2 = System.currentTimeMillis();
   timeSpentPrefetchEquiv += time2 - time1;
   dataTracker.prefetchIds(fetchedObjectIds);
   time1 = System.currentTimeMillis();
   timeSpentPrefetchTracker += time1 - time2;
 }
Example #17
0
 /**
  * Creates the test-execution phase: wires in the executors, log directories and result
  * sets, and prepares the parallel/isolated work queues plus the failed-batch set.
  *
  * @param hostExecutors executors for the remote hosts
  * @param executionContext context describing the hosts in use
  * @param hostExecutorBuilder builder used to replace bad host executors
  * @param localCommandFactory factory for local shell commands
  * @param templateDefaults default values for command templates
  * @param succeededLogDir directory receiving logs of succeeded batches
  * @param failedLogDir directory receiving logs of failed batches
  * @param testBatchSupplier supplies the batches to execute
  * @param executedTests collector of executed test names
  * @param failedTests collector of failed test names
  * @param logger phase logger
  * @throws IOException if the superclass fails to initialize
  */
 public ExecutionPhase(
     List<HostExecutor> hostExecutors,
     ExecutionContext executionContext,
     HostExecutorBuilder hostExecutorBuilder,
     LocalCommandFactory localCommandFactory,
     ImmutableMap<String, String> templateDefaults,
     File succeededLogDir,
     File failedLogDir,
     Supplier<List<TestBatch>> testBatchSupplier,
     Set<String> executedTests,
     Set<String> failedTests,
     Logger logger)
     throws IOException {
   super(hostExecutors, localCommandFactory, templateDefaults, logger);
   this.executionContext = executionContext;
   this.hostExecutorBuilder = hostExecutorBuilder;
   this.succeededLogDir = succeededLogDir;
   this.failedLogDir = failedLogDir;
   this.testBatchSupplier = testBatchSupplier;
   this.executedTests = executedTests;
   this.failedTests = failedTests;
   // Work queues and the failed-batch set are shared across executor threads.
   this.parallelWorkQueue = new LinkedBlockingQueue<TestBatch>();
   this.isolatedWorkQueue = new LinkedBlockingQueue<TestBatch>();
   this.failedTestResults = Collections.synchronizedSet(new HashSet<TestBatch>());
 }
Example #18
0
  /**
   * Transaction constructor.
   *
   * <p>Snapshots the peer address/port/protocol of the encapsulated channel up front so
   * later sends can detect address changes, bumps the channel use count for reliable
   * transports, disables both timers and registers the parent stack as event listener.
   *
   * @param newParentStack Parent stack for this transaction.
   * @param newEncapsulatedChannel Underlying channel for this transaction.
   */
  protected SIPTransaction(
      SIPTransactionStack newParentStack, MessageChannel newEncapsulatedChannel) {

    sipStack = newParentStack;
    // Fair semaphore serializing access to this transaction.
    this.semaphore = new Semaphore(1, true);

    encapsulatedChannel = newEncapsulatedChannel;
    // Record this to check if the address has changed before sending
    // message to avoid possible race condition.
    this.peerPort = newEncapsulatedChannel.getPeerPort();
    this.peerAddress = newEncapsulatedChannel.getPeerAddress();
    this.peerInetAddress = newEncapsulatedChannel.getPeerInetAddress();
    // @@@ hagai
    this.peerPacketSourcePort = newEncapsulatedChannel.getPeerPacketSourcePort();
    this.peerPacketSourceAddress = newEncapsulatedChannel.getPeerPacketSourceAddress();
    this.peerProtocol = newEncapsulatedChannel.getPeerProtocol();
    // Reliable transports (e.g. TCP) share channels, so track usage for cleanup.
    if (this.isReliable()) {
      encapsulatedChannel.useCount++;
      if (sipStack.isLoggingEnabled(LogWriter.TRACE_DEBUG))
        sipStack
            .getStackLogger()
            .logDebug(
                "use count for encapsulated channel" + this + " " + encapsulatedChannel.useCount);
    }

    this.currentState = null;

    disableRetransmissionTimer();
    disableTimeoutTimer();
    eventListeners = Collections.synchronizedSet(new HashSet<SIPTransactionEventListener>());

    // Always add the parent stack as a listener
    // of this transaction
    addEventListener(newParentStack);
  }
Example #19
0
  @SuppressWarnings("unused")
  // Only used for debugging
  private static class ThrowingBitmapTracker implements BitmapTracker {
    private final Set<Bitmap> bitmaps = Collections.synchronizedSet(new HashSet<Bitmap>());

    /**
     * Tracks a bitmap, throwing if it is already tracked.
     *
     * <p>BUGFIX: the former contains()+add() pair was not atomic even on a synchronized
     * set; Set.add's boolean result makes the duplicate check a single operation.
     */
    @Override
    public void add(Bitmap bitmap) {
      if (!bitmaps.add(bitmap)) {
        throw new IllegalStateException(
            "Can't add already added bitmap: "
                + bitmap
                + " ["
                + bitmap.getWidth()
                + "x"
                + bitmap.getHeight()
                + "]");
      }
    }

    /** Stops tracking a bitmap, throwing if it was not tracked (atomic via remove()). */
    @Override
    public void remove(Bitmap bitmap) {
      if (!bitmaps.remove(bitmap)) {
        throw new IllegalStateException("Cannot remove bitmap not in tracker");
      }
    }
  }
/**
 * WebSocket endpoint that streams donation ("Spende") updates for a selected campaign
 * ("Aktion") to connected monitor clients.
 */
@ServerEndpoint(
    value = "/spende",
    encoders = {SpendeEncoder.class})
public class MonitorWebSocket {
  public static final String AKTION_ID = "AktionId";

  // All connected monitor sessions.
  // NOTE(review): getSessions() hands out this live set; callers iterating it must
  // synchronize on it manually — confirm call sites.
  private static Set<Session> sessions = Collections.synchronizedSet(new HashSet<Session>());

  public static Set<Session> getSessions() {
    return sessions;
  }

  private Logger logger = Logger.getLogger(MonitorWebSocket.class.getName());

  @Inject private SpendeListProvider spendeListProvider;

  /** Registers a newly connected client session. */
  @OnOpen
  public void onOpen(Session session) {
    logger.info("Client hat sich verbunden: " + session);
    sessions.add(session);
  }

  /** Removes a session whose connection was closed. */
  @OnClose
  public void onClose(Session session) {
    logger.info("Client hat Verbindung getrennt: " + session);
    sessions.remove(session);
  }

  /**
   * Switches the client to the given campaign: fetches its donation list, replays it to
   * the client, and remembers the campaign id in the session's user properties.
   *
   * @param aktionId id of the selected campaign
   * @param session the client session that sent the selection
   */
  @OnMessage
  public void setAktionId(Long aktionId, Session session) {
    logger.info("Client " + session.getId() + " hat Aktion " + aktionId + " ausgewählt.");
    try {
      List<Spende> result = new LinkedList<>();
      try {
        result = spendeListProvider.getSpendeList(aktionId);
      } catch (NotFoundException e) {
        // Unknown campaign id: tell the client, keep the connection open.
        session
            .getBasicRemote()
            .sendText("Die Aktion mit der ID: " + aktionId + " ist nicht verfügbar");
      } catch (WebApplicationException e) {
        logger.log(
            Level.SEVERE,
            "Die Spendenliste für Aktion mit ID: "
                + aktionId
                + " konnte nicht abgerufen werden. Läuft der JBoss?",
            e);
        session.getBasicRemote().sendText("Fehler beim Abruf der initialen Spendenliste.");
      }
      // Remember the selection so later broadcasts can filter by campaign.
      session.getUserProperties().put(AKTION_ID, aktionId);
      for (Spende spende : result) {
        logger.info("Sende " + spende + " an Client " + session.getId());
        session.getBasicRemote().sendObject(spende);
      }
      session.getBasicRemote().sendText("Aktion geändert zu: " + aktionId);
    } catch (IOException | EncodeException e) {
      logger.log(Level.INFO, "Keine Verbindung zu Client: " + session, e);
    }
  }
}
  /**
   * Returns the IN-edge objects of this vertex, optionally filtered by label.
   *
   * <p>With no labels the union over all IN-edge labels is returned. With a single
   * label the per-label cache is consulted first and populated on a miss. With several
   * labels the per-label results are merged. The returned set is always unmodifiable.
   *
   * @param labels edge labels to fetch; {@code null}/empty means "all labels"
   * @return an unmodifiable set of matching IN edges (never null, possibly empty)
   */
  protected Set<Edge> getInEdgeObjects(final String... labels) {
    Map<String, Set<Edge>> inCache = getInEdgeCache();
    Set<Edge> result = null;
    if (labels == null || labels.length == 0) {
      // Union of the per-label results across every known IN label.
      result = Collections.synchronizedSet(new LinkedHashSet<Edge>());
      Set<String> labelSet = this.getInEdgeLabels();
      for (String label : labelSet) {
        result.addAll(getInEdgeObjects(label));
      }
    } else if (labels.length == 1) {
      String label = labels[0];
      // CONSISTENCY FIX: guard against a null label like getOutEdgeObjects does.
      if (label == null) {
        return Collections.unmodifiableSet(getInEdgeObjects());
      }
      synchronized (inCache) {
        result = inCache.get(label);
      }
      if (result == null) {
        Set<String> edgeIds = getInEdgesSet(label);
        Set<Edge> edges = getParent().getEdgesFromIds(edgeIds);
        if (edges != null) {
          // NOTE(review): raw cast assumes getEdgesFromIds returns a LinkedHashSet;
          // confirm against the parent implementation.
          result = Collections.synchronizedSet((LinkedHashSet) edges);
        } else {
          // BUGFIX: previously a null result was cached here and then handed to
          // Collections.unmodifiableSet below, throwing NullPointerException.
          result = Collections.synchronizedSet(new LinkedHashSet<Edge>());
        }
        synchronized (inCache) {
          inCache.put(label, result);
        }
      }
    } else {
      // Multiple explicit labels: merge the per-label (cached) results.
      result = Collections.synchronizedSet(new LinkedHashSet<Edge>());
      for (String label : labels) {
        result.addAll(getInEdgeObjects(label));
      }
    }
    return Collections.unmodifiableSet(result);
  }
 /**
  * Registers an interceptor instance under the given type key, creating the per-type
  * bucket on first use.
  *
  * @param type the interceptor class used as registry key; must not be null
  * @param instance the interceptor to register; must not be null
  * @throws IllegalArgumentException if either argument is null
  */
 public static <T extends Interceptor> void register(Class<T> type, T instance) {
   if (type == null) throw new IllegalArgumentException("Type may not be null");
   if (instance == null) throw new IllegalArgumentException("Instance may not be null");
   // PERF: look the bucket up once instead of containsKey()+get().
   // NOTE(review): the null-check-then-put is not atomic — if 'interceptors' is touched
   // from several threads, two racing registrations may each install a fresh set and one
   // instance can be lost; confirm the map's required concurrency semantics.
   Set<Object> bucket = interceptors.get(type);
   if (bucket == null) {
     bucket = Collections.synchronizedSet(new HashSet<Object>());
     interceptors.put(type, bucket);
   }
   bucket.add(instance);
 }
 /**
  * Creates a listening I/O reactor.
  *
  * @param workerCount number of dispatch worker threads
  * @param threadFactory factory for the reactor's threads
  * @param params HTTP parameters configuring the reactor
  * @throws IOReactorException if the underlying reactor cannot be created
  */
 public DefaultListeningIOReactor(
     int workerCount, final ThreadFactory threadFactory, final HttpParams params)
     throws IOReactorException {
   super(workerCount, threadFactory, params);
   // Endpoints queued for listener setup; concurrent since callers enqueue from any thread.
   this.requestQueue = new ConcurrentLinkedQueue<ListenerEndpointImpl>();
   // Currently known endpoints; wrapped since multiple threads read/modify it.
   this.endpoints = Collections.synchronizedSet(new HashSet<ListenerEndpointImpl>());
   // Endpoints suspended while paused.
   this.pausedEndpoints = new HashSet<SocketAddress>();
 }
 /**
  * PRIVATE - called by I/O Manager (ProjectReader); 'types' should only hold "user"
  * types after it is initialized by the I/O Manager. Rebuilds both lookup tables via
  * addType and snapshots the incoming collection as the synchronized user-type set.
  */
 private void setTypes(Collection types) {
   this.types = new Hashtable(types.size());
   this.typeNames = new Hashtable(types.size());
   // Register each incoming type; addType populates both lookup tables.
   for (Object type : types) {
     this.addType((MWClass) type);
   }
   this.userTypes = Collections.synchronizedSet(new HashSet(types));
 }
Example #25
0
  /**
   * LectorBitmaps: constructor. Reads the screen height to derive the side length used
   * for list thumbnails, prepares the set tracking discarded bitmaps, and loads the
   * placeholder "loading" image plus the in-memory cache.
   */
  public LectorBitmaps(Activity activity) {
    int screenHeight = activity.getResources().getDisplayMetrics().heightPixels;
    longitud_lado_img_lista = screenHeight / RELACION_PANTALLA_LISTA;
    bitmaps_desechados = Collections.synchronizedSet(new HashSet<SoftReference<Bitmap>>());

    obtenerImagenCargando(activity);
    obtenerMemoriaCache();
  }
Example #26
0
  /**
   * Restores this buffer's contents from the given stream: reads allocation mode,
   * length and written element type, then repopulates the backing storage element by
   * element via put(), converting from the written type to this buffer's type.
   *
   * @param s stream positioned at a buffer record produced by the matching writer
   * @throws RuntimeException wrapping any read error
   */
  protected void read(DataInputStream s) {
    try {
      ref = new WeakReference<DataBuffer>(this, Nd4j.bufferRefQueue());
      referencing = Collections.synchronizedSet(new HashSet<String>());
      dirty = new AtomicBoolean(false);
      allocationMode = AllocationMode.valueOf(s.readUTF());
      length = s.readInt();
      // 't' is the element type the data was WRITTEN as; this.dataType() is the type
      // of THIS buffer — the branches below convert between the two.
      Type t = Type.valueOf(s.readUTF());
      if (t == Type.DOUBLE) {
        if (allocationMode == AllocationMode.HEAP) {
          // Allocate the heap array matching this buffer's own type.
          if (this.dataType() == Type.FLOAT) { // DataBuffer type
            // double -> float
            floatData = new float[length()];
          } else if (this.dataType() == Type.DOUBLE) {
            // double -> double
            doubleData = new double[length()];
          } else {
            // double -> int
            intData = new int[length()];
          }
          for (int i = 0; i < length(); i++) {
            put(i, s.readDouble());
          }
        } else {
          // Off-heap: back the buffer with a native-order direct ByteBuffer.
          wrappedBuffer = ByteBuffer.allocateDirect(length() * getElementSize());
          wrappedBuffer.order(ByteOrder.nativeOrder());
          for (int i = 0; i < length(); i++) {
            put(i, s.readDouble());
          }
        }
      } else {
        // Data was written as float.
        if (allocationMode == AllocationMode.HEAP) {
          if (this.dataType() == Type.FLOAT) { // DataBuffer type
            // float -> float
            floatData = new float[length()];
          } else if (this.dataType() == Type.DOUBLE) {
            // float -> double
            doubleData = new double[length()];
          } else {
            // float-> int
            intData = new int[length()];
          }
          for (int i = 0; i < length(); i++) {
            put(i, s.readFloat());
          }
        } else {
          wrappedBuffer = ByteBuffer.allocateDirect(length() * getElementSize());
          wrappedBuffer.order(ByteOrder.nativeOrder());
          for (int i = 0; i < length(); i++) {
            put(i, s.readFloat());
          }
        }
      }

    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
    /**
     * {@inheritDoc}
     *
     * <p>Adds the specified {@code sessionId} to the per-channel cache for the given channel's
     * local member sessions, and sends a CHANNEL_JOIN protocol message to the session with the
     * corresponding {@code sessionId}.
     */
    public void join(String name, byte[] channelId, byte[] sessionId) {
      callStarted();
      try {
        if (logger.isLoggable(Level.FINEST)) {
          logger.log(
              Level.FINEST,
              "join channelId:{0} sessionId:{1}",
              HexDumper.toHexString(channelId),
              HexDumper.toHexString(sessionId));
        }

        // Update local channel membership cache.
        // putIfAbsent race idiom: if another thread installed a set first, use theirs.
        BigInteger channelRefId = new BigInteger(1, channelId);
        Set<BigInteger> localMembers = localChannelMembersMap.get(channelRefId);
        if (localMembers == null) {
          Set<BigInteger> newLocalMembers = Collections.synchronizedSet(new HashSet<BigInteger>());
          localMembers = localChannelMembersMap.putIfAbsent(channelRefId, newLocalMembers);
          if (localMembers == null) {
            localMembers = newLocalMembers;
          }
        }
        BigInteger sessionRefId = new BigInteger(1, sessionId);
        localMembers.add(sessionRefId);

        // Update per-session channel set cache (same putIfAbsent idiom as above).
        Set<BigInteger> channelSet = localPerSessionChannelsMap.get(sessionRefId);
        if (channelSet == null) {
          Set<BigInteger> newChannelSet = Collections.synchronizedSet(new HashSet<BigInteger>());
          channelSet = localPerSessionChannelsMap.putIfAbsent(sessionRefId, newChannelSet);
          if (channelSet == null) {
            channelSet = newChannelSet;
          }
        }
        channelSet.add(channelRefId);

        // Send CHANNEL_JOIN protocol message.
        // Layout: 1 opcode byte + length-prefixed name + raw channel id bytes.
        MessageBuffer msg = new MessageBuffer(1 + MessageBuffer.getSize(name) + channelId.length);
        msg.putByte(SimpleSgsProtocol.CHANNEL_JOIN).putString(name).putBytes(channelId);
        sessionService.sendProtocolMessageNonTransactional(
            sessionRefId, ByteBuffer.wrap(msg.getBuffer()).asReadOnlyBuffer(), Delivery.RELIABLE);

      } finally {
        callFinished();
      }
    }
 /** Initialize persistent state: fresh type tables, user-type bookkeeping and classpath. */
 protected void initialize(Node parent) {
   super.initialize(parent);
   this.types = new Hashtable();
   this.typeNames = new Hashtable();
   // userTypes is read from multiple threads, hence the synchronized wrapper.
   this.userTypes = Collections.synchronizedSet(new HashSet());
   this.classpathEntries = new Vector();
   this.userTypeNames = new HashSet();
   this.persistLastRefresh = true;
 }
  /**
   * Registers all renewable delegation tokens of a newly submitted application and
   * schedules their renewal; requests a fresh HDFS token if the app brought none.
   *
   * <p>Tokens are first renewed once (validating them) and only then added to the
   * per-application set and the renewal timer — an invalid user-supplied token must
   * fail the submission rather than be scheduled.
   *
   * @param evt submission event carrying the application id and its credentials
   * @throws IOException if a token cannot be renewed
   * @throws InterruptedException if interrupted while requesting a new HDFS token
   */
  private void handleAppSubmitEvent(DelegationTokenRenewerAppSubmitEvent evt)
      throws IOException, InterruptedException {
    ApplicationId applicationId = evt.getApplicationId();
    Credentials ts = evt.getCredentials();
    boolean shouldCancelAtEnd = evt.shouldCancelAtEnd();
    if (ts == null) {
      return; // nothing to add
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug("Registering tokens for renewal for:" + " appId = " + applicationId);
    }

    Collection<Token<?>> tokens = ts.getAllTokens();
    long now = System.currentTimeMillis();

    // find tokens for renewal, but don't add timers until we know
    // all renewable tokens are valid
    // At RM restart it is safe to assume that all the previously added tokens
    // are valid
    appTokens.put(
        applicationId, Collections.synchronizedSet(new HashSet<DelegationTokenToRenew>()));
    Set<DelegationTokenToRenew> tokenList = new HashSet<DelegationTokenToRenew>();
    boolean hasHdfsToken = false;
    for (Token<?> token : tokens) {
      if (token.isManaged()) {
        tokenList.add(
            new DelegationTokenToRenew(
                applicationId, token, getConfig(), now, shouldCancelAtEnd, evt.getUser()));
        if (token.getKind().equals(new Text("HDFS_DELEGATION_TOKEN"))) {
          LOG.info(applicationId + " found existing hdfs token " + token);
          hasHdfsToken = true;
        }
      }
    }

    if (!tokenList.isEmpty()) {
      // Renewing token and adding it to timer calls are separated purposefully
      // If user provides incorrect token then it should not be added for
      // renewal.
      for (DelegationTokenToRenew dtr : tokenList) {
        try {
          renewToken(dtr);
        } catch (IOException ioe) {
          throw new IOException("Failed to renew token: " + dtr.token, ioe);
        }
      }
      for (DelegationTokenToRenew dtr : tokenList) {
        appTokens.get(applicationId).add(dtr);
        setTimerForTokenRenewal(dtr);
      }
    }

    // Apps without an HDFS token get one requested on their behalf.
    if (!hasHdfsToken) {
      requestNewHdfsDelegationToken(applicationId, evt.getUser(), shouldCancelAtEnd);
    }
  }
 /**
  * Copies the contents of the SuppressBioFieldFacet from one Player Character to another
  * Player Character, based on the given CharIDs representing those Player Characters.
  *
  * <p>This is a method in SuppressBioFieldFacet in order to avoid exposing the mutable
  * Map object to other classes. This should not be inlined, as the Set is internal
  * information to SuppressBioFieldFacet and should not be exposed to other classes.
  *
  * <p>Note also the copy is a one-time event and no references are maintained between
  * the Player Characters represented by the given CharIDs (meaning once this copy takes
  * place, any change to the SuppressBioFieldFacet will only impact the Player Character
  * where the SuppressBioFieldFacet was changed).
  *
  * @param source The CharID representing the Player Character from which the
  *     information should be copied
  * @param copy The CharID representing the Player Character to which the information
  *     should be copied
  */
 @Override
 public void copyContents(CharID source, CharID copy) {
   Set<BiographyField> sourceSet = (Set<BiographyField>) getCache(source);
   if (sourceSet == null) {
     // Nothing cached for the source character; nothing to copy.
     return;
   }
   // Independent snapshot: later mutations on either character do not affect the other.
   Set<BiographyField> duplicate =
       Collections.synchronizedSet(new HashSet<BiographyField>(sourceSet));
   setCache(copy, duplicate);
 }