public static void getBlogs() throws Exception {

    BlockingQueue<String> queue = new ArrayBlockingQueue<String>(numCrawler * 4);

    CrawlerC[] crawler = new CrawlerC[numCrawler];
    for (int i = 0; i < crawler.length; i++) {
      crawler[i] = new CrawlerC(queue);
      crawler[i].start();
    }

    ResultSet rs = null;
    int offset = 0;
    while (true) {
      // Page through the Brazilian blogs 100 rows at a time.
      rs = myStm.executeQuery(
          "SELECT blogID FROM blogs WHERE country = 'BR' LIMIT " + offset + ",100");
      System.out.println("\n---" + offset + "---");
      offset += 100;

      try {
        if (!rs.next()) break; // empty page: no more rows
        do {
          if (!queue.offer(rs.getString("blogID"), 60, TimeUnit.SECONDS)) {
            System.out.println("Offer.Timeout");
          }
        } while (rs.next());
      } catch (Exception e) {
        System.out.println("DB.Error: " + e);
      }
    }

    queue.clear();
    for (int i = 0; i < crawler.length; i++) queue.put(CrawlerC.NO_MORE_WORK);
    for (int i = 0; i < crawler.length; i++) crawler[i].join();
  }
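
The paging loop above builds its SQL by concatenating the offset into the query string. A minimal alternative sketch using a PreparedStatement, assuming a java.sql.Connection named myCon is in scope (an assumption; it is not shown in the source):

 // Hypothetical variant of the paging loop in getBlogs(): binding the offset avoids
 // rebuilding the SQL string on every iteration (myCon is assumed, not shown above).
 PreparedStatement ps =
     myCon.prepareStatement("SELECT blogID FROM blogs WHERE country = 'BR' LIMIT ?, 100");
 for (int offset = 0; ; offset += 100) {
   ps.setInt(1, offset);
   ResultSet page = ps.executeQuery();
   if (!page.next()) {
     page.close();
     break; // empty page: no more rows
   }
   do {
     queue.put(page.getString("blogID")); // may throw InterruptedException
   } while (page.next());
   page.close();
 }
 ps.close();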
 private int pollCompletedDownloads() {
   int tasks = 0;
   // Drain everything currently available. Looping against completedRequests.size()
   // while polling would stop early, because the queue shrinks on every poll().
   DownloadRequest request;
   while ((request = completedRequests.poll()) != null) {
     parserService.submit(new FeedParserTask(request));
     tasks++;
   }
   return tasks;
 }
 public final T take() {
   if (!state) {
     return queue.poll();
   } else {
     try {
       return queue.take();
     } catch (InterruptedException e) {
       // Preserve the interrupt status instead of just printing the stack trace.
       Thread.currentThread().interrupt();
       return null;
     }
   }
 }
  @Test
  public void testIssue292() throws Exception {
    final BlockingQueue<Member> qResponse = new ArrayBlockingQueue<Member>(1);
    createSingleNodeExecutorService("testIssue292")
        .submit(
            new MemberCheck(),
            new ExecutionCallback<Member>() {
              public void onResponse(Member response) {
                qResponse.offer(response);
              }

              public void onFailure(Throwable t) {}
            });
    Object response = qResponse.poll(10, TimeUnit.SECONDS);
    assertNotNull(response);
    assertTrue(response instanceof Member);
  }
    /**
     * Waits for completed requests. Once the first request has been taken, the method will wait
     * WAIT_TIMEOUT ms longer to collect more completed requests.
     *
     * @return Collected feeds or null if the method has been interrupted during the first waiting
     *     period.
     */
    private List<Feed> collectCompletedRequests() {
      List<Feed> results = new LinkedList<Feed>();
      DownloadRequester requester = DownloadRequester.getInstance();
      int tasks = 0;

      try {
        DownloadRequest request = completedRequests.take();
        parserService.submit(new FeedParserTask(request));
        tasks++;
      } catch (InterruptedException e) {
        return null;
      }

      tasks += pollCompletedDownloads();

      isCollectingRequests = true;

      if (requester.isDownloadingFeeds()) {
        // wait for completion of more downloads
        long startTime = System.currentTimeMillis();
        long currentTime = startTime;
        while (requester.isDownloadingFeeds() && (currentTime - startTime) < WAIT_TIMEOUT) {
          try {
            if (BuildConfig.DEBUG)
              Log.d(TAG, "Waiting for " + (startTime + WAIT_TIMEOUT - currentTime) + " ms");
            sleep(startTime + WAIT_TIMEOUT - currentTime);
          } catch (InterruptedException e) {
            if (BuildConfig.DEBUG) Log.d(TAG, "interrupted while waiting for more downloads");
            tasks += pollCompletedDownloads();
          } finally {
            currentTime = System.currentTimeMillis();
          }
        }

        tasks += pollCompletedDownloads();
      }

      isCollectingRequests = false;

      for (int i = 0; i < tasks; i++) {
        try {
          Feed f = parserService.take().get();
          if (f != null) {
            results.add(f);
          }
        } catch (InterruptedException e) {
          e.printStackTrace();

        } catch (ExecutionException e) {
          e.printStackTrace();
        }
      }

      return results;
    }
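
collectCompletedRequests() reads parser results with parserService.take().get(), i.e. in the order the parser tasks complete. A minimal wiring sketch for such a service, assuming FeedParserTask implements Callable<Feed> (an assumption; its declaration is not shown here):

 // Hypothetical setup: an ExecutorCompletionService hands back futures in completion
 // order, which is what the take().get() loop above relies on.
 ExecutorService parserPool = Executors.newFixedThreadPool(4);
 CompletionService<Feed> parserService = new ExecutorCompletionService<Feed>(parserPool);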
 public final void put(T item) {
   if (!state) {
     throw new RuntimeException("Putting Forbidden");
   } else {
     try {
       queue.put(item);
     } catch (InterruptedException e) {
       // Preserve the interrupt status instead of just printing the stack trace.
       Thread.currentThread().interrupt();
     }
   }
 }
 public void run() {
   try {
     while (!Thread.interrupted()) {
       // Blocks until a course is ready
       Plate plate = filledOrders.take();
       print(this + "received " + plate + " delivering to " + plate.getOrder().getCustomer());
       plate.getOrder().getCustomer().deliver(plate);
     }
   } catch (InterruptedException e) {
     print(this + " interrupted");
   }
   print(this + " off duty");
 }
 /**
  * Cancels and clears the queue of all tasks that should not be run due to shutdown policy.
  * Invoked within super.shutdown.
  */
 @Override
 void onShutdown() {
   BlockingQueue<Runnable> q = super.getQueue();
   boolean keepDelayed = getExecuteExistingDelayedTasksAfterShutdownPolicy();
   boolean keepPeriodic = getContinueExistingPeriodicTasksAfterShutdownPolicy();
   if (!keepDelayed && !keepPeriodic) {
     for (Object e : q.toArray())
       if (e instanceof RunnableScheduledFuture<?>) ((RunnableScheduledFuture<?>) e).cancel(false);
     q.clear();
   } else {
     // Traverse snapshot to avoid iterator exceptions
     for (Object e : q.toArray()) {
       if (e instanceof RunnableScheduledFuture) {
         RunnableScheduledFuture<?> t = (RunnableScheduledFuture<?>) e;
         if ((t.isPeriodic() ? !keepPeriodic : !keepDelayed)
             || t.isCancelled()) { // also remove if already cancelled
           if (q.remove(t)) t.cancel(false);
         }
       }
     }
   }
   tryTerminate();
 }
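
The two policies consulted in onShutdown() are plain setters on ScheduledThreadPoolExecutor. A short usage sketch of how they determine what survives shutdown():

 // With both policies set to false, onShutdown() cancels every queued delayed and
 // periodic task and clears the work queue when shutdown() is called.
 ScheduledThreadPoolExecutor stpe = new ScheduledThreadPoolExecutor(2);
 stpe.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
 stpe.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
 stpe.shutdown(); // triggers onShutdown(), which drains the queue as shown above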
  public void run() {
    while (true) {

      try {
        // System.out.println(r+": Take(wait)");
        // String[] info = q.take();
        String blogID = q.poll(60, TimeUnit.SECONDS);
        if (blogID == null) {
          System.out.println("Poll.Timeout");
          continue;
        }

        // System.out.println(r+": Take(get) : "+blogID);

        // Poison pill: identity comparison is intentional; NO_MORE_WORK is a dedicated sentinel object.
        if (blogID == NO_MORE_WORK) {
          break;
        }

        URL feedUrl = new URL("http://www.blogger.com/feeds/" + blogID + "/comments/default");
        Query myQuery = new Query(feedUrl);
        myQuery.setMaxResults(25);

        System.out.print(r + "+,");
        Feed resultFeed = myService.query(myQuery, Feed.class);

        for (Entry entry : resultFeed.getEntries()) {
          if (entry.getAuthors().get(0).getUri() != null) {
            // Blogger profile URIs contain a 20-digit numeric profile ID.
            String profileID = entry.getAuthors().get(0).getUri().replaceAll("[^\\d]", "");
            if (profileID.length() == 20) {
              try {
                myStm.executeUpdate(
                    "INSERT IGNORE INTO author SET profileID = '" + profileID + "'");
              } catch (Exception e) {
                System.out.print(r + "DB-ERR,");
              }
            }
          }
        }

      } catch (Exception e) {
        System.out.print(r + "ERR,");
      }
    }

    System.out.println("Bye(" + r + ")");
    try {
      myStm.close();
    } catch (Exception e) {
      // Ignore: nothing useful can be done if closing the statement fails.
    }
  }
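
The NO_MORE_WORK check in run() compares by identity, so the sentinel must be a dedicated object that no real blogID can equal. A minimal sketch of how such a poison pill might be declared (the actual declaration is not shown in the source, so this is an assumption):

 // Hypothetical sentinel for CrawlerC: new String(...) guarantees a unique object
 // identity, so a blogID containing the same characters can never match the == check.
 public static final String NO_MORE_WORK = new String("NO_MORE_WORK");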
    /*package*/ List<O> doJob(List<I> batch) {
      List<O> generatedBatch;
      assert !lastBatchSent;

      if (batch == POISON_PILL) {
        lastBatch = true;
        synchronized (name) {
          pendingJobs--;
        }
        //                System.out.println(name + " - lastBatch");
        generatedBatch = Collections.emptyList();
      } else {

        EXECUTOR task = taskQueue.poll();

        boolean nextNodesAvailable = true;
        for (Node<O, ?, ?> node : nodes) {
          nextNodesAvailable &= node.isAvailable();
        }

        if (task == null) { // No available task
          resubmit(batch);
          generatedBatch = null;
        } else if (!nextNodesAvailable) { // Next nodes have too many batches.
          try {
            taskQueue.put(task);
          } catch (InterruptedException e) {
            e.printStackTrace();
          }
          resubmit(batch);
          generatedBatch = null;
        } else { // Execute

          generatedBatch = execute(task, batch);
          //                    System.out.println(name + " - end job - " + generatedBatch.size());
          for (Node<O, ?, ?> node : nodes) {
            node.submit(generatedBatch);
          }

          try {
            taskQueue.put(task);
          } catch (InterruptedException e) {
            e.printStackTrace();
          }
          synchronized (name) {
            pendingJobs--;
          }
          //                    System.out.println(name + " - pendingJobs " + pendingJobs);
        }
      }

      if (isFinished()) {
        if (!lastBatchSent) {
          for (Node<O, ?, ?> node : nodes) {
            node.submit(POISON_PILL);
          }
          lastBatchSent = true;
        }
        System.out.println("Node '" + name + "' is finished");
        synchronized (syncObject) {
          syncObject.notify();
        }
      } else {
        System.out.println("Node '" + name + "' pendingJobs " + pendingJobs);
      }
      return generatedBatch;
    }
Example #11
 public int queueSize() {
   return changes.size();
 }
Example #12
 @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "RV_RETURN_VALUE_IGNORED_BAD_PRACTICE")
 private void sendInterruptMarker() {
   LOG.debug("Sending interrupt marker in order to interrupt feed consumer");
   changes.offer(INTERRUPT_MARKER);
 }
Example #13
 private void checkIfInterrupted(DocumentChange c) throws InterruptedException {
   if (c == INTERRUPT_MARKER || (!shouldRun && changes.isEmpty())) {
     throw new InterruptedException();
   }
 }
Example #14
 public DocumentChange next(long timeout, TimeUnit unit) throws InterruptedException {
   assertRunningState();
   DocumentChange c = changes.poll(timeout, unit);
   checkIfInterrupted(c);
   return c;
 }
 public void removeItem(RecordingEpisode episode) {
   if (!RecordingMaps.remove(episode)) {
     Log.getInstance()
         .write(Log.LOGLEVEL_ERROR, "DT: Failed to remove episode from RecordingMaps.");
   }
 }
 public final synchronized void init() {
   queue.clear();
   state = true;
 }
 public final int size() {
   return queue.size();
 }
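
init(), size(), put(), and take() above appear to be pieces of a small state-gated wrapper around a BlockingQueue. A hypothetical usage sketch (the wrapper's class name, GatedQueue, is an assumption, not taken from the source):

 // Hypothetical usage of the state-gated wrapper shown in pieces above.
 GatedQueue<String> gated = new GatedQueue<String>();
 gated.init();               // clears the queue and sets state = true
 gated.put("work-item");     // allowed only while state is true
 String item = gated.take(); // blocks via queue.take() while state is true,
                             // otherwise falls back to a non-blocking poll()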
 /**
  * Returns the number of items in the download queue. Does NOT include the currently downloading
  * item, if any.
  *
  * <p>
  *
  * @return The number of items in the download queue.
  */
 public Integer getNumberOfQueuedItems() {
    Log.getInstance().write(Log.LOGLEVEL_TRACE, "DT: getNumberOfQueuedItems.");
   return RecordingMaps.size();
 }
 /**
  * Add an item to be downloaded.
  *
  * <p>
  *
  * @param episode the RecordingEpisode to add to the download queue.
  * @return true if it succeeded, false otherwise.
  */
 public boolean addItem(RecordingEpisode episode) {
   Log.getInstance().write(Log.LOGLEVEL_TRACE, "DT: addItem.");
   return RecordingMaps.add(episode);
 }
  /** The main thread that does all of the downloading. */
  @Override
  public void run() {
    Log.getInstance().write(Log.LOGLEVEL_TRACE, "DT: Starting.");

    Thread.currentThread().setName("DownloadThread");

    while (!stop) {

      // Get the first item in the queue and then remove it from the queue.
      try {
        CurrentlyRecording = RecordingMaps.take();
      } catch (InterruptedException e) {
        Log.getInstance().write(Log.LOGLEVEL_WARN, "DT: Interrupted.  Terminating.");
        return;
      }

      Log.getInstance().write(Log.LOGLEVEL_TRACE, "DT: Have work to do.");

      showCurrentlyRecording(CurrentlyRecording);

      // Make sure we have enough parameters.
      if (!CurrentlyRecording.isComplete()) {
        Log.getInstance().write(Log.LOGLEVEL_ERROR, "DT: Not enough parameters.");
        CurrentlyRecording.failed();
        continue;
      }

      DownloadManager.getInstance().setCurrentlyRecordingID(CurrentlyRecording.getRequestID());

      // Get all of the RSSItems for the Feed Context.
      List<RSSItem> RSSItems = CurrentlyRecording.getRSSItems();
      if (RSSItems == null) {
        Log.getInstance().write(Log.LOGLEVEL_ERROR, "DT: null RSSItems.");
        CurrentlyRecording.failed();
        continue;
      }

      Log.getInstance()
          .write(Log.LOGLEVEL_TRACE, "DT: Found episodes for podcast = " + RSSItems.size());

      if (RSSItems.isEmpty()) {
        Log.getInstance().write(Log.LOGLEVEL_ERROR, "DT: No RSSItems.");
        CurrentlyRecording.failed();
        continue;
      }

      // Get the one ChanItem (RSSItem) we are interested in.
      RSSItem ChanItem =
          CurrentlyRecording.getItemForID(RSSItems, CurrentlyRecording.getEpisodeID());
      if (ChanItem == null) {
        Log.getInstance().write(Log.LOGLEVEL_ERROR, "DT: null ChanItem.");
        CurrentlyRecording.failed();
        continue;
      }

      // Set the ChanItem.
      CurrentlyRecording.setChanItem(ChanItem);

      // Set the fileExt instance variable.
      CurrentlyRecording.setFileExt();

      // Create the tempfile where the episode will be downloaded to.
      if (!CurrentlyRecording.setTempFile()) {
        Log.getInstance().write(Log.LOGLEVEL_ERROR, "DT: Failed to setTempFile.");
        CurrentlyRecording.failed();
        continue;
      }

      // Download the episode to the tempfile.
      if (!CurrentlyRecording.download()) {
        Log.getInstance().write(Log.LOGLEVEL_ERROR, "DT: download failed.");
        CurrentlyRecording.failed();
        continue;
      }

      // Check for 0 size download.
      if (CurrentlyRecording.isZeroSizeDownload()) {
        Log.getInstance().write(Log.LOGLEVEL_WARN, "DT: File is 0 bytes long.");
        CurrentlyRecording.failed();
        continue;
      }

      // Move the tempfile to the final location and rename it to the final name.
      if (!CurrentlyRecording.moveToFinalLocation()) {
        Log.getInstance().write(Log.LOGLEVEL_ERROR, "DT: moveToFinalLocation failed.");
        CurrentlyRecording.failed();
        continue;
      }

      // Import the episode into the Sage database as an imported media file.
      if (CurrentlyRecording.importAsAiring() == null) {
        Log.getInstance().write(Log.LOGLEVEL_ERROR, "DT: importAsMediaFile failed.");
        CurrentlyRecording.failed();
        continue;
      }

      // Force Episode to update its Airing information.
      int AiringID = CurrentlyRecording.getAiringID();

      // It worked.
      Log.getInstance().write(Log.LOGLEVEL_TRACE, "DT: Completed successfully.");
      CurrentlyRecording.completed();
      CurrentlyRecording = null;
    } // While !stop

    Log.getInstance().write(Log.LOGLEVEL_TRACE, "DT: Fatal error. Ending.");
  } // Run
Example #21
 public DocumentChange poll() throws InterruptedException {
   assertRunningState();
   DocumentChange c = changes.poll();
   checkIfInterrupted(c);
   return c;
 }
 public Iterator<T> iterator() {
   return queue.iterator();
 }
 public void submitCompletedDownload(DownloadRequest request) {
   completedRequests.offer(request);
   if (isCollectingRequests) {
     interrupt();
   }
 }
Example #24
 private void handleChange(String line)
     throws IOException, InterruptedException, JsonParseException, JsonMappingException {
   changes.put(new StdDocumentChange(OBJECT_MAPPER.readTree(line)));
 }
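
Taken together, sendInterruptMarker(), checkIfInterrupted(), poll(), next(), and handleChange() implement a sentinel-based wake-up for a blocking consumer. A self-contained sketch of the same pattern with a plain String queue (all names here are illustrative, not from the source):

 import java.util.concurrent.*;

 // Self-contained sketch of the interrupt-marker pattern used above, with Strings
 // standing in for DocumentChange. Only the sentinel's identity (==) matters.
 class MarkerQueueSketch {
   private static final String INTERRUPT_MARKER = new String("INTERRUPT");
   private final BlockingQueue<String> changes = new LinkedBlockingQueue<String>();

   void sendInterruptMarker() {
     changes.offer(INTERRUPT_MARKER);
   }

   String next(long timeout, TimeUnit unit) throws InterruptedException {
     String c = changes.poll(timeout, unit);
     if (c == INTERRUPT_MARKER) {
       throw new InterruptedException();
     }
     return c;
   }
 }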