/**
 * Takes a single Feed, parses the corresponding file and refreshes information in the manager.
 */
class FeedSyncThread extends Thread {
    private static final String TAG = "FeedSyncThread";

    private BlockingQueue<DownloadRequest> completedRequests =
            new LinkedBlockingDeque<DownloadRequest>();
    private CompletionService<Feed> parserService =
            new ExecutorCompletionService<Feed>(Executors.newSingleThreadExecutor());
    private ExecutorService dbService = Executors.newSingleThreadExecutor();
    private Future<?> dbUpdateFuture;
    private volatile boolean isActive = true;
    private volatile boolean isCollectingRequests = false;

    private final long WAIT_TIMEOUT = 3000;

    /**
     * Waits for completed requests. Once the first request has been taken, the method will wait
     * WAIT_TIMEOUT ms longer to collect more completed requests.
     *
     * @return Collected feeds or null if the method has been interrupted during the first waiting
     *         period.
     */
    private List<Feed> collectCompletedRequests() {
        List<Feed> results = new LinkedList<Feed>();
        DownloadRequester requester = DownloadRequester.getInstance();
        int tasks = 0;

        try {
            DownloadRequest request = completedRequests.take();
            parserService.submit(new FeedParserTask(request));
            tasks++;
        } catch (InterruptedException e) {
            return null;
        }

        tasks += pollCompletedDownloads();

        isCollectingRequests = true;

        if (requester.isDownloadingFeeds()) {
            // wait for completion of more downloads
            long startTime = System.currentTimeMillis();
            long currentTime = startTime;
            while (requester.isDownloadingFeeds() && (currentTime - startTime) < WAIT_TIMEOUT) {
                try {
                    if (BuildConfig.DEBUG)
                        Log.d(TAG, "Waiting for " + (startTime + WAIT_TIMEOUT - currentTime) + " ms");
                    sleep(startTime + WAIT_TIMEOUT - currentTime);
                } catch (InterruptedException e) {
                    if (BuildConfig.DEBUG)
                        Log.d(TAG, "interrupted while waiting for more downloads");
                    tasks += pollCompletedDownloads();
                } finally {
                    currentTime = System.currentTimeMillis();
                }
            }

            tasks += pollCompletedDownloads();
        }

        isCollectingRequests = false;

        for (int i = 0; i < tasks; i++) {
            try {
                Feed f = parserService.take().get();
                if (f != null) {
                    results.add(f);
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
            } catch (ExecutionException e) {
                e.printStackTrace();
            }
        }

        return results;
    }

    private int pollCompletedDownloads() {
        int tasks = 0;
        // Drain every request that has completed so far and hand it to the parser.
        DownloadRequest request;
        while ((request = completedRequests.poll()) != null) {
            parserService.submit(new FeedParserTask(request));
            tasks++;
        }
        return tasks;
    }

    @Override
    public void run() {
        while (isActive) {
            final List<Feed> feeds = collectCompletedRequests();

            if (feeds == null) {
                continue;
            }

            if (BuildConfig.DEBUG) Log.d(TAG, "Bundling " + feeds.size() + " feeds");

            for (Feed feed : feeds) {
                // Duplicate images have to be removed because the DownloadRequester does not
                // accept two downloads with the same download URL yet.
                removeDuplicateImages(feed);
            }
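            // dbService is single-threaded and the previous dbUpdateFuture is awaited before a
            // new task is submitted, so feed batches are written to the database one at a time
            // and in the order in which they were collected.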
            // Save information of feed in DB
            if (dbUpdateFuture != null) {
                try {
                    dbUpdateFuture.get();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                } catch (ExecutionException e) {
                    e.printStackTrace();
                }
            }

            dbUpdateFuture = dbService.submit(new Runnable() {
                @Override
                public void run() {
                    Feed[] savedFeeds = DBTasks.updateFeed(DownloadService.this,
                            feeds.toArray(new Feed[feeds.size()]));

                    for (Feed savedFeed : savedFeeds) {
                        // Download Feed Image if provided and not downloaded
                        if (savedFeed.getImage() != null
                                && !savedFeed.getImage().isDownloaded()) {
                            if (BuildConfig.DEBUG)
                                Log.d(TAG, "Feed has image; Downloading....");
                            savedFeed.getImage().setOwner(savedFeed);
                            final Feed savedFeedRef = savedFeed;
                            try {
                                requester.downloadImage(DownloadService.this,
                                        savedFeedRef.getImage());
                            } catch (DownloadRequestException e) {
                                e.printStackTrace();
                                DBWriter.addDownloadStatus(DownloadService.this,
                                        new DownloadStatus(savedFeedRef.getImage(),
                                                savedFeedRef.getImage().getHumanReadableIdentifier(),
                                                DownloadError.ERROR_REQUEST_ERROR,
                                                false, e.getMessage()));
                            }
                        }
                        numberOfDownloads.decrementAndGet();
                    }

                    sendDownloadHandledIntent();
                    queryDownloadsAsync();
                }
            });
        }

        if (dbUpdateFuture != null) {
            try {
                dbUpdateFuture.get();
            } catch (InterruptedException e) {
            } catch (ExecutionException e) {
                e.printStackTrace();
            }
        }

        if (BuildConfig.DEBUG) Log.d(TAG, "Shutting down");
    }

    private class FeedParserTask implements Callable<Feed> {
        private DownloadRequest request;

        private FeedParserTask(DownloadRequest request) {
            this.request = request;
        }

        @Override
        public Feed call() throws Exception {
            return parseFeed(request);
        }
    }

    private Feed parseFeed(DownloadRequest request) {
        Feed savedFeed = null;

        Feed feed = new Feed(request.getSource(), new Date());
        feed.setFile_url(request.getDestination());
        feed.setId(request.getFeedfileId());
        feed.setDownloaded(true);
        feed.setPreferences(new FeedPreferences(0, true,
                request.getUsername(), request.getPassword()));

        DownloadError reason = null;
        String reasonDetailed = null;
        boolean successful = true;
        FeedHandler feedHandler = new FeedHandler();

        try {
            feed = feedHandler.parseFeed(feed).feed;
            if (BuildConfig.DEBUG) Log.d(TAG, feed.getTitle() + " parsed");
            if (!checkFeedData(feed)) {
                throw new InvalidFeedException();
            }
        } catch (SAXException e) {
            successful = false;
            e.printStackTrace();
            reason = DownloadError.ERROR_PARSER_EXCEPTION;
            reasonDetailed = e.getMessage();
        } catch (IOException e) {
            successful = false;
            e.printStackTrace();
            reason = DownloadError.ERROR_PARSER_EXCEPTION;
            reasonDetailed = e.getMessage();
        } catch (ParserConfigurationException e) {
            successful = false;
            e.printStackTrace();
            reason = DownloadError.ERROR_PARSER_EXCEPTION;
            reasonDetailed = e.getMessage();
        } catch (UnsupportedFeedtypeException e) {
            e.printStackTrace();
            successful = false;
            reason = DownloadError.ERROR_UNSUPPORTED_TYPE;
            reasonDetailed = e.getMessage();
        } catch (InvalidFeedException e) {
            e.printStackTrace();
            successful = false;
            reason = DownloadError.ERROR_PARSER_EXCEPTION;
            reasonDetailed = e.getMessage();
        }

        // cleanup();

        if (savedFeed == null) {
            savedFeed = feed;
        }

        if (successful) {
            return savedFeed;
        } else {
            saveDownloadStatus(new DownloadStatus(savedFeed,
                    savedFeed.getHumanReadableIdentifier(), reason, successful,
                    reasonDetailed));
            return null;
        }
    }
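    // parseFeed() returns null for feeds that could not be parsed; the failure is recorded via
    // saveDownloadStatus(), and collectCompletedRequests() skips null results, so a broken feed
    // never reaches the DB update in run().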
    /** Checks if the feed was parsed correctly. */
    private boolean checkFeedData(Feed feed) {
        if (feed.getTitle() == null) {
            Log.e(TAG, "Feed has no title.");
            return false;
        }
        if (!hasValidFeedItems(feed)) {
            Log.e(TAG, "Feed has invalid items");
            return false;
        }
        return true;
    }

    /**
     * Checks if the FeedItems of this feed have images that point to the same URL. If two
     * FeedItems have an image that points to the same URL, the reference of the second item is
     * removed, so that every image reference is unique.
     */
    private void removeDuplicateImages(Feed feed) {
        for (int x = 0; x < feed.getItems().size(); x++) {
            for (int y = x + 1; y < feed.getItems().size(); y++) {
                FeedItem item1 = feed.getItems().get(x);
                FeedItem item2 = feed.getItems().get(y);
                if (item1.hasItemImage() && item2.hasItemImage()) {
                    if (StringUtils.equals(item1.getImage().getDownload_url(),
                            item2.getImage().getDownload_url())) {
                        item2.setImage(null);
                    }
                }
            }
        }
    }

    private boolean hasValidFeedItems(Feed feed) {
        for (FeedItem item : feed.getItems()) {
            if (item.getTitle() == null) {
                Log.e(TAG, "Item has no title");
                return false;
            }
            if (item.getPubDate() == null) {
                Log.e(TAG, "Item has no pubDate. Using current time as pubDate");
                if (item.getTitle() != null) {
                    Log.e(TAG, "Title of invalid item: " + item.getTitle());
                }
                item.setPubDate(new Date());
            }
        }
        return true;
    }

    /** Delete files that aren't needed anymore */
    private void cleanup(Feed feed) {
        if (feed.getFile_url() != null) {
            // Braces keep the error branch tied to the delete() check, not to the DEBUG check.
            if (new File(feed.getFile_url()).delete()) {
                if (BuildConfig.DEBUG) Log.d(TAG, "Successfully deleted cache file.");
            } else {
                Log.e(TAG, "Failed to delete cache file.");
            }
            feed.setFile_url(null);
        } else if (BuildConfig.DEBUG) {
            Log.d(TAG, "Didn't delete cache file: File url is not set.");
        }
    }

    public void shutdown() {
        isActive = false;
        if (isCollectingRequests) {
            interrupt();
        }
    }

    public void submitCompletedDownload(DownloadRequest request) {
        completedRequests.offer(request);
        if (isCollectingRequests) {
            interrupt();
        }
    }
}
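// How the pieces above are wired into the rest of DownloadService is not part of this excerpt;
// as an illustrative sketch only (the request variable below is hypothetical), the service is
// expected to hand over finished feed downloads and to stop the thread on shutdown:
//
//     feedSyncThread.submitCompletedDownload(request); // request: the finished DownloadRequest
//     ...
//     feedSyncThread.shutdown();
//
// submitCompletedDownload() interrupts the thread while it is collecting requests, so a newly
// completed download is picked up immediately instead of after the full WAIT_TIMEOUT sleep.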
@SuppressLint("NewApi")
@Override
public void onCreate() {
    if (BuildConfig.DEBUG) Log.d(TAG, "Service started");
    isRunning = true;
    handler = new Handler();
    completedDownloads = Collections.synchronizedList(new ArrayList<DownloadStatus>());
    downloads = new ArrayList<Downloader>();
    numberOfDownloads = new AtomicInteger(0);

    IntentFilter cancelDownloadReceiverFilter = new IntentFilter();
    cancelDownloadReceiverFilter.addAction(ACTION_CANCEL_ALL_DOWNLOADS);
    cancelDownloadReceiverFilter.addAction(ACTION_CANCEL_DOWNLOAD);
    registerReceiver(cancelDownloadReceiver, cancelDownloadReceiverFilter);

    syncExecutor = Executors.newSingleThreadExecutor(new ThreadFactory() {
        @Override
        public Thread newThread(Runnable r) {
            Thread t = new Thread(r);
            t.setPriority(Thread.MIN_PRIORITY);
            return t;
        }
    });

    downloadExecutor = new ExecutorCompletionService<Downloader>(
            Executors.newFixedThreadPool(NUM_PARALLEL_DOWNLOADS, new ThreadFactory() {
                @Override
                public Thread newThread(Runnable r) {
                    Thread t = new Thread(r);
                    t.setPriority(Thread.MIN_PRIORITY);
                    return t;
                }
            }));

    schedExecutor = new ScheduledThreadPoolExecutor(SCHED_EX_POOL_SIZE,
            new ThreadFactory() {
                @Override
                public Thread newThread(Runnable r) {
                    Thread t = new Thread(r);
                    t.setPriority(Thread.MIN_PRIORITY);
                    return t;
                }
            },
            new RejectedExecutionHandler() {
                @Override
                public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
                    Log.w(TAG, "SchedEx rejected submission of new task");
                }
            });

    downloadCompletionThread.start();
    feedSyncThread = new FeedSyncThread();
    feedSyncThread.start();

    setupNotificationBuilders();
    requester = DownloadRequester.getInstance();
}
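// All executors created in onCreate() build their worker threads with Thread.MIN_PRIORITY so
// that feed parsing, downloading and scheduled work stay in the background; schedExecutor also
// logs and drops rejected tasks instead of throwing a RejectedExecutionException.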