Example #1
1
  /** {@inheritDoc} */
  public void setVmStatus(final String instanceId, final CloudVmStatus status) {

    Runnable task =
        new Runnable() {
          @Override
          public void run() {
            tracker.setStatus(status);
            if (status.getJobStatus() == JobStatus.Completed
                || status.getVmStatus() == VMStatus.terminated) {
              // will terminate the instance after waiting for some cleanup time
              terminator.terminate(status.getInstanceId());
              // check job status and kill off instances appropriately
              checkJobStatus(status.getJobId(), mailService);
            }
          }
        };
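    // Completed or terminated statuses are dispatched on a fresh daemon thread;
    // all other status updates go through the shared EXECUTOR.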
    if (status.getJobStatus() == JobStatus.Completed
        || status.getVmStatus() == VMStatus.terminated) {
      Thread t = new Thread(task);
      t.setDaemon(true);
      t.start();
    } else {
      EXECUTOR.execute(task);
    }
  }
Example #2
0
  @Test
  public void testConcurrentPutGet() throws NetInfCheckedException, InterruptedException {
    List<InformationObject> insertedIOs = new ArrayList<InformationObject>();
    ThreadPoolExecutor executor =
        new ThreadPoolExecutor(
            5, 10, 10, TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(10000));

    for (int i = 0; i < NODE_NUMBER; i++) {
      for (int j = 0; j < IOS_PER_NODE; j++) {
        InformationObject io = createUniqueIO();
        executor.execute(new NodePutCommand(resolutionServices.get(i), io));
        insertedIOs.add(io);
      }
    }

    for (InformationObject io : insertedIOs) {
      NodeGetCommand getter = new NodeGetCommand(resolutionServices.get(0), io);
      executor.execute(getter);
      getterCommands.add(getter);
    }
    executor.shutdown();
    executor.awaitTermination(30, TimeUnit.SECONDS);

    for (NodeGetCommand getter : getterCommands) {
      Assert.assertTrue(getter.isCorrect());
    }
  }
  @Test
  public void testHttpWrappedContinuations() throws Exception {
    SpringBusFactory bf = new SpringBusFactory();
    Bus bus = bf.createBus(CONFIG_FILE);
    BusFactory.setDefaultBus(bus);

    QName serviceName =
        new QName("http://cxf.apache.org/systest/jaxws", "HelloContinuationService");

    URL wsdlURL = getClass().getResource("/org/apache/cxf/systest/jms/continuations/test.wsdl");

    HelloContinuationService service = new HelloContinuationService(wsdlURL, serviceName);
    assertNotNull(service);
    final HelloContinuation helloPort = service.getHelloContinuationPort();

    ThreadPoolExecutor executor =
        new ThreadPoolExecutor(5, 5, 0, TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(10));
    CountDownLatch startSignal = new CountDownLatch(1);
    CountDownLatch helloDoneSignal = new CountDownLatch(5);

    executor.execute(new HelloWorker(helloPort, "Fred", "", startSignal, helloDoneSignal));
    executor.execute(new HelloWorker(helloPort, "Barry", "Jameson", startSignal, helloDoneSignal));
    executor.execute(new HelloWorker(helloPort, "Harry", "", startSignal, helloDoneSignal));
    executor.execute(new HelloWorker(helloPort, "Rob", "Davidson", startSignal, helloDoneSignal));
    executor.execute(
        new HelloWorker(helloPort, "James", "ServiceMix", startSignal, helloDoneSignal));

    startSignal.countDown();
    helloDoneSignal.await(60, TimeUnit.SECONDS);
    executor.shutdownNow();
    assertEquals("Not all invocations have completed", 0, helloDoneSignal.getCount());
  }
    @Override
    public void run() {
      running = true;
      while (running) {
        try {
          addNewRunnables();

          if (activeRunnables == null) {
            continue;
          }

          int tasksExecuted = 0;

          while (activeRunnables != null && activeRunnables.runnable.isReady()) {
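            // the head runnable is ready: execute it and advance the head pointer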
            executorService.execute(activeRunnables.runnable);
            activeRunnables = activeRunnables.next;
            tasksExecuted++;
          }

          if (activeRunnables == null) {
            if (log.isTraceEnabled() && tasksExecuted > 0) {
              log.tracef(
                  "Tasks executed=%s, still active=%s", tasksExecuted, count(activeRunnables));
            }
            continue;
          }

          RunnableEntry iterator = activeRunnables;
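          // walk the rest of the list, executing and unlinking any entry whose runnable is ready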
          while (iterator.next != null) {
            RunnableEntry toAnalyze = iterator.next;
            if (toAnalyze.runnable.isReady()) {
              executorService.execute(toAnalyze.runnable);
              iterator.next = toAnalyze.next;
              tasksExecuted++;
            } else {
              iterator = iterator.next;
            }
          }

          if (log.isTraceEnabled() && tasksExecuted > 0) {
            log.tracef("Tasks executed=%s, still active=%s", tasksExecuted, count(activeRunnables));
          }
        } catch (InterruptedException e) {
          break;
        } catch (Throwable throwable) {
          if (log.isTraceEnabled()) {
            log.tracef(throwable, "Exception caught while executing task");
          } else {
            log.warnf("Exception caught while executing task: %s", throwable.getLocalizedMessage());
          }
        }
      }
      executorService.shutdown();
      clearAll();
    }
  @Test
  public void testAddAndGetTile() throws InterruptedException, FileNotFoundException, IOException {
    // Input stream to use
    ImageInputStream stream_in = null;
    try {
      stream_in = new FileImageInputStream(TestData.file(this, "world.tiff"));
      // Input RenderedImage to use
      final RenderedOp input =
          ImageReadDescriptor.create(
              stream_in, 0, false, false, false, null, null, null, null, null);

      // Boolean used for checking if the conditions are passed
      final AtomicBoolean passed = new AtomicBoolean(true);
      // Cache creation
      final ConcurrentTileCacheMultiMap cache =
          new ConcurrentTileCacheMultiMap(1000 * 1000, false, 1f, 4);
      // Selection of one tile from the image
      Raster data = input.getTile(input.getMinTileX(), input.getMinTileY());
      // Setting the tile inside the cache
      cache.add(input, input.getMinTileX(), input.getMinTileY(), data);
      // Thread pool to use for doing concurrent access on the cache
      ThreadPoolExecutor executor =
          new ThreadPoolExecutor(
              TOTAL, TOTAL, 60, TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(1000000));
      // Latch used for waiting all the threads to end their work
      final CountDownLatch latch = new CountDownLatch(TOTAL);
      // Cycle for launching various requests
      int counter = TOTAL;
      while (counter > 0) {

        executor.execute(
            new Runnable() {

              public void run() {
                // Get the tile to use
                Raster data = cache.getTile(input, input.getMinTileX(), input.getMinTileY());
                if (data == null) {
                  passed.getAndSet(false);
                }
                latch.countDown();
              }
            });
        // Counter update
        counter--;
      }
      // Waiting all threads to finish
      latch.await();
      // Ensure that all the threads have found the tile
      Assert.assertTrue(passed.get());
    } finally {
      try {
        if (stream_in != null) {
          stream_in.flush();
          stream_in.close();
        }
      } catch (Throwable t) {
        //
      }
    }
  }
 private void stopHeartBeat(Channel channel) {
   HeartBeatWorker worker = heartBeatWorkerMap.get(channel);
   if (worker != null) {
     threadPool.execute(new HeartBeatWorkerStoper(worker));
   }
   heartBeatWorkerMap.remove(channel);
 }
 public static void main(String[] args) throws Exception {
   if (args.length != 5) {
     System.out.println(
         "usage: Main riakurl bucketName filename filetype(1=accesslog,2=pgdump) threads");
     System.exit(0);
   }
   fileType = Integer.parseInt(args[3]);
   threads = Integer.parseInt(args[4]);
   threadPoolExecutor =
       new ThreadPoolExecutor(
           threads, threads, 1, TimeUnit.DAYS, new ArrayBlockingQueue<Runnable>(threads));
   String url = args[0];
   File inputFile = new File(args[2]);
   bucketName = args[1];
   BufferedReader br = new BufferedReader(new FileReader(inputFile));
   int lines = 0;
   if (fileType == FILETYPE_PG_DUMP) headers = br.readLine().split(";");
   while (br.ready()) {
     lines++;
     String line = br.readLine();
     processLine(line);
     if (lines >= BATCH_SIZE) {
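       // back-pressure: wait for the executor queue to drain before submitting another batch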
       while (threadPoolExecutor.getQueue().size() >= threads - 1) Thread.sleep(1000);
       threadPoolExecutor.execute(new RiakDumper(url, blogStats));
       currentTimeStamp = parsedTimestamp;
       lines = 0;
       blogStats = new BlogStats();
     }
   }
 }
 /**
  * Drives the actual test on an Executor and verifies the result
  *
  * @param maps the caches to be tested
  * @throws IOException
  * @throws InterruptedException
  */
 private void testConcurrentLocking(List<ConcurrentMap<String, String>> maps)
     throws IOException, InterruptedException {
   SharedStats stats = new SharedStats();
   ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(NODES_NUM);
   List<StressingThread> threads = new ArrayList<StressingThread>();
   for (ConcurrentMap<String, String> map : maps) {
     StressingThread thread = new StressingThread(stats, map);
     threads.add(thread);
     executor.execute(thread);
   }
   executor.shutdown();
   Thread.sleep(5000);
   int putsAfter5Seconds = stats.succesfullPutsCounter.get();
   System.out.println("\nSituation after 5 seconds:");
   System.out.println(stats.toString());
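   // let the stress threads run for the full stress period, then signal them to quit and give them a moment to exit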
   executor.awaitTermination(STRESS_TIME_MINUTES, TimeUnit.MINUTES);
   stats.globalQuit = true;
   executor.awaitTermination(10, TimeUnit.SECONDS); // give some time to awake and quit
   executor.shutdownNow();
   System.out.println("\nFinal situation:");
   System.out.println(stats.toString());
   assert !stats.seenFailures : "at least one thread has seen unexpected state";
   assert stats.succesfullPutsCounter.get() > 0 : "the lock should have been taken at least once";
   assert stats.succesfullPutsCounter.get() > putsAfter5Seconds
       : "the lock count didn't improve since the first 5 seconds. Deadlock?";
   assert stats.succesfullPutsCounter.get() == stats.lockReleasedCounter.get()
       : "there's a mismatch in acquires and releases count";
   assert stats.lockOwnersCounter.get() == 0 : "the lock is still held at test finish";
 }
 /**
  * Requests repairs for the given table and column families, and blocks until all repairs have
  * been completed.
  */
 public RepairFuture submitRepairSession(
     Range<Token> range, String tablename, boolean isSequential, String... cfnames) {
   RepairFuture futureTask =
       new RepairSession(range, tablename, isSequential, cfnames).getFuture();
   executor.execute(futureTask);
   return futureTask;
 }
  public static void main(String[] args) {
    ThreadPoolExecutor threadPoolExecutor =
         (ThreadPoolExecutor) Executors.newFixedThreadPool(20); // create a thread pool
     final Semaphore semaphore = new Semaphore(5); // a semaphore with 5 permits: at most 5 threads may access the shared section at once

    for (int i = 0; i < 100; i++) {
       final int threadIndex = i; // give each task its own index
      Runnable runnable =
          new Runnable() {
            public void run() {
              try {
                semaphore.acquire();
                System.out.println("Access:" + threadIndex + "线程正在运行.....");
                TimeUnit.MILLISECONDS.sleep(5000);
                semaphore.release();
                System.out.println("还有" + semaphore.availablePermits() + "个资源可以用\n");

              } catch (InterruptedException e) {
                e.printStackTrace();
              }
            }
          };
       threadPoolExecutor.execute(runnable); // submit the task to the thread pool for execution
    }
     threadPoolExecutor.shutdown(); // shut the pool down once all tasks have been submitted
  }
Example #11
0
  public void flush() throws BufferClosedException {
    this.lock.lock();
    try {

      if (this.flushing) {
        if (SDFSLogger.isDebug())
          SDFSLogger.getLog()
              .debug("cannot flush buffer at pos " + this.getFilePosition() + " already flushing");
        throw new BufferClosedException("Buffer Closed");
      }
      if (this.closed) {
        if (SDFSLogger.isDebug())
          SDFSLogger.getLog()
              .debug("cannot flush buffer at pos " + this.getFilePosition() + " closed");
        throw new BufferClosedException("Buffer Closed");
      }
      this.flushing = true;
      if (this.isDirty() || this.isHlAdded()) {
        this.df.putBufferIntoFlush(this);
        if (Main.chunkStoreLocal) lexecutor.execute(this);
        else SparseDedupFile.pool.execute(this);
      }
    } finally {
      this.lock.unlock();
    }
  }
  /**
   * Loads a web image and places it into the cache.
   *
   * @param path
   */
  private void addLoadWebImageThreadInPool(final String path, final ImageLoadListener listener) {
    Runnable runnable =
        new Runnable() {

          @Override
          public void run() {
            Bitmap bmp = null;
            try {
              URL myurl = new URL(path);
              // open the HTTP connection
              HttpURLConnection conn = (HttpURLConnection) myurl.openConnection();
              conn.setConnectTimeout(6000); // set the connection timeout
              conn.setDoInput(true);
              conn.setUseCaches(false); // do not use caches
              conn.connect();
              InputStream is = conn.getInputStream(); // get the image data stream
              bmp = compressInStreamToBitmap(is);
              is.close();
            } catch (Exception e) {
              e.printStackTrace();
            } finally {
              if (bmp != null) {
                addBitmapToMemoryCache(path, bmp);
              }
              listener.onLoaded(bmp);
            }
          }
        };
    mThreadPoolExecutor.execute(runnable);
  }
  /**
   * Recursively walks the given directories and submits every XML file for conversion.
   *
   * @param files
   * @throws ParserConfigurationException
   * @throws InterruptedException
   */
  public void walkThroughParse(File[] files)
      throws ParserConfigurationException, InterruptedException {
    FileFilter filter = new FileNameExtensionFilter("XML file", "xml");

    for (File file : files) {
      if (file.isDirectory()) {

        while (pool.getActiveCount() != 0) Thread.sleep(1000);

        walkThroughParse(file.listFiles());
      } else if (file.isFile() && file.canRead()) {
        if (filter.accept(file)) {
          this.i++;
          pool.execute(
              (new ConvertFileThread(
                  this.fileOutputDir,
                  file,
                  this.failFile,
                  this.maxDirs,
                  i,
                  this.total,
                  this.print)));
        }
      }
    }
  }
 /** Asynchronously refreshes the tokens contained in this instance. */
 private void beginRefresh() {
   try {
     refreshExecutor.execute(
         new Runnable() {
           @Override
           public void run() {
             Log.debug("[oauth] Refreshing access token.");
             Tokens tokens;
             try {
               tokens =
                   oauthClient
                       .getTokensForClient(
                           tokenEndpoint, clientId, clientSecret, "manage_project:" + projectKey)
                       .get();
             } catch (Exception e) {
               update(null, e);
               return;
             }
             update(tokens, null);
           }
         });
   } catch (RejectedExecutionException e) {
     // another refresh is already in progress, ignore this one
   }
 }
  private void runProxies(
      BookStore proxy, int numberOfClients, boolean threadSafe, boolean stateCanBeChanged)
      throws Exception {
    ThreadPoolExecutor executor =
        new ThreadPoolExecutor(5, 5, 0, TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(10));
    CountDownLatch startSignal = new CountDownLatch(1);
    CountDownLatch doneSignal = new CountDownLatch(numberOfClients);

    for (int i = 1; i <= numberOfClients; i++) {
      // here we do a double copy : from proxy to web client and back to proxy
      BookStore bs =
          !threadSafe
              ? JAXRSClientFactory.fromClient(
                  WebClient.fromClient(WebClient.client(proxy)), BookStore.class)
              : proxy;
      String bookName = stateCanBeChanged ? Integer.toString(i) : "TheBook";
      String bookHeader = stateCanBeChanged ? "value" + i : "CustomValue";

      executor.execute(
          new RootProxyWorker(
              bs, bookName, bookHeader, startSignal, doneSignal, stateCanBeChanged));
    }
    startSignal.countDown();
    doneSignal.await(60, TimeUnit.SECONDS);
    executor.shutdownNow();
    assertEquals("Not all invocations have completed", 0, doneSignal.getCount());
  }
 public void addTask(Runnable r) {
   try {
     threadPoolExecutor.execute(r);
   } catch (Exception ex) {
     log.error("add search domain exception:", ex);
   }
 }
Example #17
0
  private static void realMain(String[] args) throws Throwable {
    final int n = 4;
    final CyclicBarrier barrier = new CyclicBarrier(2 * n + 1);
    final ThreadPoolExecutor pool =
        new ThreadPoolExecutor(
            n, 2 * n, KEEPALIVE_MS, MILLISECONDS, new SynchronousQueue<Runnable>());
    final Runnable r =
        new Runnable() {
          public void run() {
            try {
              barrier.await();
              barrier.await();
            } catch (Throwable t) {
              unexpected(t);
            }
          }
        };

    for (int i = 0; i < 2 * n; i++) pool.execute(r);
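    // with a SynchronousQueue each of the 2*n blocked tasks needs its own thread, so the pool grows to its maximum size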
    barrier.await();
    checkPoolSizes(pool, 2 * n, n, 2 * n);
    barrier.await();
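    // tasks have finished; idle threads above the core size of n should be reclaimed once the keep-alive time expires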
    long nap = KEEPALIVE_MS + (KEEPALIVE_MS >> 2);
    for (long sleepyTime = 0L; pool.getPoolSize() > n; ) {
      check((sleepyTime += nap) <= LONG_DELAY_MS);
      Thread.sleep(nap);
    }
    checkPoolSizes(pool, n, n, 2 * n);
    Thread.sleep(nap);
    checkPoolSizes(pool, n, n, 2 * n);
    pool.shutdown();
    check(pool.awaitTermination(LONG_DELAY_MS, MILLISECONDS));
  }
Example #18
0
 @Override
 public void serviceRemoved(final URI service) {
   services.remove(service.toString());
   for (final DiscoveryListener discoveryListener : getListeners()) {
     executor.execute(new ServiceRemovedTask(discoveryListener, service));
   }
 }
    public Drawable loadDrawable(
        final Context context, final String imageUrl, final ImageCallback imageCallback) {
      if (cacheMap.containsKey(imageUrl)) {
        SoftReference<Drawable> softReference = cacheMap.get(imageUrl);
        Drawable drawable = softReference.get();
        if (drawable != null) {
          return drawable;
        }
      }

      final Handler handler =
          new Handler() {
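            // handleMessage runs on the thread that created this Handler and hands the Drawable to the callback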
            public void handleMessage(Message message) {
              imageCallback.imageLoaded((Drawable) message.obj, imageUrl);
            }
          };

      executor.execute(
          new Runnable() {
            public void run() {
              Drawable drawable = loadImageFromUrl(context, imageUrl);

              if (null != drawable) cacheMap.put(imageUrl, new SoftReference<Drawable>(drawable));

              Message message = handler.obtainMessage(0, drawable);
              handler.sendMessage(message);
            }
          });

      return null;
    }
  private AbstractShard getReadMap(byte[] hash) throws IOException {
    Lock l = gcLock.readLock();
    l.lock();
    // long v = ct.incrementAndGet();
    try {

      if (!runningGC && !lbf.mightContain(hash)) {
        // SDFSLogger.getLog().info("not in bloom filter");
        return null;
      }
    } finally {
      l.unlock();
    }
    Iterator<ProgressiveFileByteArrayLongMap> iter = activeReadMaps.iterator();
    while (iter.hasNext()) {
      ProgressiveFileByteArrayLongMap _m = iter.next();
      if (_m.containsKey(hash)) return _m;
    }
    iter = maps.iterator();
    while (iter.hasNext()) {
      ProgressiveFileByteArrayLongMap _m = iter.next();
      if (!activeReadMaps.contains(_m) && _m.containsKey(hash)) {
        al.lock();
        try {
          // SDFSLogger.getLog().info("adding active " +
          // _m.toString());
          if (activeReadMaps.remainingCapacity() == 0) {
            ProgressiveFileByteArrayLongMap obf = activeReadMaps.poll();
            // SDFSLogger.getLog().info("removed active " +
            // obf.toString());
            if (obf != null) obf.stopRun();
          }
          /*
           * if(activeReadMaps.offer(_m))
           * SDFSLogger.getLog().info("added active " +
           * _m.toString()); else
           * SDFSLogger.getLog().info("unable to add active " +
           * _m.toString());
           */
          try {
            loadCacheExecutor.execute(_m);
          } catch (Exception e) {
            if (SDFSLogger.isDebug()) SDFSLogger.getLog().debug("unable to cache " + _m, e);
          }
        } finally {
          al.unlock();
        }
        return _m;
      }
    }
    /*
    if(!runningGC) {
    	long mv = mt.incrementAndGet();
    	double pc = (double)mv/(double)v;
    	SDFSLogger.getLog().info("might be in bloom filter " + runningGC + " pc=" + pc);

    }
    */
    return null;
  }
Example #21
0
    @Override
    public synchronized void execute(final Runnable command) {
      Runnable r =
          new Runnable() {
            @Override
            public void run() {
              command.run();
              next();
            }
          };
      if (mCachedSerialExecutor.getActiveCount() < serialOneTime) {
        // below the per-batch concurrency limit: run the task immediately
        mCachedSerialExecutor.execute(r);
      } else {
        // above the concurrency cap: evict the oldest queued task
        if (mQueue.size() >= serialMaxCount) {
          mQueue.pollFirst();
        }
        // enqueue the new task at the tail
        mQueue.offerLast(r);

        // dynamically query the current number of CPU cores and adjust the settings accordingly
        // int proCount = Runtime.getRuntime().availableProcessors();
        // if (proCount != cpuCount) {
        // cpuCount = proCount;
        // reSettings(proCount);
        // }
      }
    }
 public static void main(String[] args) {
   BlockingQueue<Runnable> queue = new LinkedBlockingQueue<Runnable>(12);
   ThreadFactory threadFactory =
       new ThreadFactory() {
         public Thread newThread(Runnable r) {
           int currentCount = counter.getAndIncrement();
           System.out.println("Creating new thread: " + currentCount);
           return new Thread(r, "mythread" + currentCount);
         }
       };
   RejectedExecutionHandler rejectedHandler =
       new RejectedExecutionHandler() {
         public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
           if (r instanceof ThreadPoolExecutorExample) {
             ThreadPoolExecutorExample example = (ThreadPoolExecutorExample) r;
             System.out.println("Rejecting task with id " + example.getTaskId());
           }
         }
       };
   ThreadPoolExecutor executor =
       new ThreadPoolExecutor(5, 12, 1, TimeUnit.SECONDS, queue, threadFactory, rejectedHandler);
   for (int i = 0; i < 100; i++) {
     executor.execute(new ThreadPoolExecutorExample(i));
   }
   executor.shutdown();
 }
  /**
   * Scans the specified path and populates the intermediate cache.
   *
   * @param absPath
   * @throws IOException
   */
  private void scanIntermediateDirectory(final Path absPath) throws IOException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Scanning intermediate dir " + absPath);
    }
    List<FileStatus> fileStatusList = scanDirectoryForHistoryFiles(absPath, intermediateDoneDirFc);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Found " + fileStatusList.size() + " files");
    }
    for (FileStatus fs : fileStatusList) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("scanning file: " + fs.getPath());
      }
      JobIndexInfo jobIndexInfo = FileNameIndexUtils.getIndexInfo(fs.getPath().getName());
      String confFileName = JobHistoryUtils.getIntermediateConfFileName(jobIndexInfo.getJobId());
      String summaryFileName =
          JobHistoryUtils.getIntermediateSummaryFileName(jobIndexInfo.getJobId());
      HistoryFileInfo fileInfo =
          new HistoryFileInfo(
              fs.getPath(),
              new Path(fs.getPath().getParent(), confFileName),
              new Path(fs.getPath().getParent(), summaryFileName),
              jobIndexInfo,
              false);

      final HistoryFileInfo old = jobListCache.addIfAbsent(fileInfo);
      if (old == null || old.didMoveFail()) {
        final HistoryFileInfo found = (old == null) ? fileInfo : old;
        long cutoff = System.currentTimeMillis() - maxHistoryAge;
        if (found.getJobIndexInfo().getFinishTime() <= cutoff) {
          try {
            found.delete();
          } catch (IOException e) {
            LOG.warn("Error cleaning up a HistoryFile that is out of date.", e);
          }
        } else {
          if (LOG.isDebugEnabled()) {
            LOG.debug("Scheduling move to done of " + found);
          }
          moveToDoneExecutor.execute(
              new Runnable() {
                @Override
                public void run() {
                  try {
                    found.moveToDone();
                  } catch (IOException e) {
                    LOG.info("Failed to process fileInfo for job: " + found.getJobId(), e);
                  }
                }
              });
        }
      } else if (old != null && !old.isMovePending()) {
        // This is a duplicate so just delete it
        if (LOG.isDebugEnabled()) {
          LOG.debug("Duplicate: deleting");
        }
        fileInfo.delete();
      }
    }
  }
Example #24
0
 private void receive(SendPacket sendPacket, PinpointSocket pinpointSocket) {
   try {
     worker.execute(new Dispatch(sendPacket.getPayload(), pinpointSocket.getRemoteAddress()));
   } catch (RejectedExecutionException e) {
     // cause is clear - full stack trace not necessary
     logger.warn("RejectedExecutionException Caused:{}", e.getMessage());
   }
 }
Example #25
0
 private void requestResponse(RequestPacket requestPacket, PinpointSocket pinpointSocket) {
   try {
     worker.execute(new RequestResponseDispatch(requestPacket, pinpointSocket));
   } catch (RejectedExecutionException e) {
     // cause is clear - full stack trace not necessary
     logger.warn("RejectedExecutionException Caused:{}", e.getMessage());
   }
 }
Example #26
0
 public void executeTask(Task task) {
   System.out.printf("Server: A new task has arrived\n");
   executor.execute(task);
   System.out.printf("Server: Task count: %d \n", executor.getTaskCount());
   System.out.printf("Server: Pool size: %d \n", executor.getPoolSize());
   System.out.printf("Server: Active count: %d \n", executor.getActiveCount());
   System.out.printf("Server: Compeleted task : %d \n", executor.getCompletedTaskCount());
 }
Example #27
0
  @Override
  public void execute(Runnable runnable) {
    if (runnable == null) {
      throw new IllegalArgumentException("Runnable to execute cannot be null");
    }

    mThreadPoolExecutor.execute(runnable);
  }
Example #28
0
 public void execute(Thread command) {
   if (runningThreadNames.contains(command.getName())) {
     logger.warn("{} ===================> Running.", command.getName());
     return;
   }
   logger.debug("{} ===================> Started.", command.getName());
   super.execute(command);
 }
Example #29
0
  public void addDiscoveryListener(final DiscoveryListener listener) {
    // get the listener caught up
    for (final URI service : services.values()) {
      executor.execute(new ServiceAddedTask(listener, service));
    }

    listeners.add(listener);
  }
 public static void submitTask(String fileUrl, ImageLoadTask task) {
   Log.e(TAG, "execute a ImageLoadTask ! sWorkQueue.size() = " + mWorkQueue.size());
   if (mHashMap.get(fileUrl) == null) {
     mHashMap.put(fileUrl, task);
     mExecutor.execute(task);
   } else {
     Log.e(TAG, "there is already a task running !");
   }
 }