public void doDownload() {

  if (cancelled) {
    queue.removeDownload(this);
    notifyAllOnFailure(RequestFailureType.CANCELLED, null, null, "Cancelled");
    return;
  }

  notifyAllDownloadStarted();

  // Requests carrying POST fields are sent as POSTs; everything else is a GET.
  if (initiator.postFields != null) {
    try {
      downloadPost(queue.getHttpClient());
    } catch (Throwable t) {
      BugReportActivity.handleGlobalError(initiator.context, t);
    } finally {
      queue.removeDownload(this);
    }

  } else {
    try {
      downloadGet(queue.getHttpClient());
    } catch (Throwable t) {
      BugReportActivity.handleGlobalError(initiator.context, t);
    } finally {
      queue.removeDownload(this);
      finishGet();
    }
  }
}
private synchronized void reloadAccounts(final SQLiteDatabase db) {

  final String[] fields =
      new String[] {FIELD_USERNAME, FIELD_COOKIES, FIELD_MODHASH, FIELD_PRIORITY};

  final Cursor cursor =
      db.query(TABLE, fields, null, null, null, null, FIELD_PRIORITY + " ASC");

  accountsCache = new LinkedList<RedditAccount>();
  defaultAccountCache = null;

  // TODO handle null? can this even happen?
  if (cursor != null) {

    try {
      while (cursor.moveToNext()) {

        final String username = cursor.getString(0);
        final byte[] cookies = cursor.getBlob(1);
        final String modhash = cursor.getString(2);
        final long priority = cursor.getLong(3);

        final RedditAccount account;
        try {
          account =
              new RedditAccount(
                  username,
                  modhash,
                  cookies == null ? null : new PersistentCookieStore(cookies),
                  priority);
        } catch (IOException e) {
          BugReportActivity.handleGlobalError(context, new RRError(null, null, e));
          return;
        }

        accountsCache.add(account);

        // The default account is the one with the lowest priority value.
        if (defaultAccountCache == null || account.priority < defaultAccountCache.priority) {
          defaultAccountCache = account;
        }
      }

    } finally {
      // Close the cursor even if cookie deserialisation fails and we return early.
      cursor.close();
    }

  } else {
    BugReportActivity.handleGlobalError(context, "Cursor was null after query");
  }
}
// TODO potential concurrency problem -- late joiner may be added after failure
public synchronized void addLateJoiner(final CacheRequest request) {

  if (cancelled) {
    request.notifyFailure(RequestFailureType.CANCELLED, null, null, "Cancelled");
    return;
  }

  if (!request.setDownload(this)) {
    notifyAllOnFailure(RequestFailureType.CANCELLED, null, null, "Cancelled");
    return;
  }

  if (request.isJson != initiator.isJson) {
    BugReportActivity.handleGlobalError(
        request.context, "Late joiner disagrees with initiator on JSON type");
    return;
  }

  lateJoiners.add(request);

  if (request.isHigherPriorityThan(highestPriorityReq)) {
    highestPriorityReq = request;
  }

  // If JSON parsing has already started, replay the notification so the late
  // joiner can begin consuming the (possibly still-building) value.
  if (request.isJson) {
    if (value != null) {
      request.notifyJsonParseStarted(value, RRTime.utcCurrentTimeMillis(), session, false);
    }
  }
}
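// Usage sketch (hypothetical caller; "downloadsInProgress" and the exact queue
// API are assumptions, not part of this class): a request for a URL that is
// already being fetched gets attached as a late joiner instead of starting a
// second HTTP request for the same resource.
//
//   final CacheDownload existing = downloadsInProgress.get(request.url);
//   if (existing != null) {
//     existing.addLateJoiner(request);
//   } else {
//     queue.add(request);
//   }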
private void downloadGet(final HttpClient httpClient) {

  httpGet = new HttpGet(initiator.url);
  if (initiator.isJson) httpGet.setHeader("Accept-Encoding", "gzip");

  final HttpContext localContext = new BasicHttpContext();
  localContext.setAttribute(ClientContext.COOKIE_STORE, initiator.getCookies());

  final HttpResponse response;
  final StatusLine status;

  try {
    if (cancelled) {
      notifyAllOnFailure(RequestFailureType.CANCELLED, null, null, "Cancelled");
      return;
    }

    response = httpClient.execute(httpGet, localContext);
    status = response.getStatusLine();

  } catch (Throwable t) {
    t.printStackTrace();
    notifyAllOnFailure(RequestFailureType.CONNECTION, t, null, "Unable to open a connection");
    return;
  }

  if (status.getStatusCode() != 200) {
    notifyAllOnFailure(
        RequestFailureType.REQUEST,
        null,
        status,
        String.format("HTTP error %d (%s)", status.getStatusCode(), status.getReasonPhrase()));
    return;
  }

  if (cancelled) {
    notifyAllOnFailure(RequestFailureType.CANCELLED, null, null, "Cancelled");
    return;
  }

  final HttpEntity entity = response.getEntity();

  if (entity == null) {
    notifyAllOnFailure(
        RequestFailureType.CONNECTION, null, status, "Did not receive a valid HTTP response");
    return;
  }

  final InputStream is;

  try {
    is = entity.getContent();
    mimetype = entity.getContentType() == null ? null : entity.getContentType().getValue();
  } catch (Throwable t) {
    t.printStackTrace();
    notifyAllOnFailure(
        RequestFailureType.CONNECTION, t, status, "Could not open an input stream");
    return;
  }

  final NotifyOutputStream cacheOs;

  if (initiator.cache) {
    try {
      cacheFile = manager.openNewCacheFile(initiator, session, mimetype);
      cacheOs = cacheFile.getOutputStream();
    } catch (IOException e) {
      e.printStackTrace();
      notifyAllOnFailure(RequestFailureType.STORAGE, e, null, "Could not access the local cache");
      return;
    }
  } else {
    cacheOs = null;
  }

  final long contentLength = entity.getContentLength();

  if (initiator.isJson) {

    final InputStream bis;

    if (initiator.cache) {
      // Tee the network stream into the cache file while the parser reads it,
      // reporting progress as bytes arrive.
      final CachingInputStream cis =
          new CachingInputStream(
              is,
              cacheOs,
              new CachingInputStream.BytesReadListener() {
                public void onBytesRead(final long total) {
                  notifyAllOnProgress(total, contentLength);
                }
              });

      bis = new BufferedInputStream(cis, 8 * 1024);

    } else {
      bis = new BufferedInputStream(is, 8 * 1024);
    }

    final JsonValue value;

    try {
      value = new JsonValue(bis);
      value.buildInNewThread();
    } catch (Throwable t) {
      t.printStackTrace();
      notifyAllOnFailure(RequestFailureType.PARSE, t, null, "Error parsing the JSON stream");
      return;
    }

    // Publish the value before the parse finishes, so listeners (and late
    // joiners) can start consuming it while it is still being built.
    synchronized (this) {
      this.value = value;
      notifyAllOnJsonParseStarted(value, RRTime.utcCurrentTimeMillis(), session);
    }

    try {
      value.join();
    } catch (Throwable t) {
      t.printStackTrace();
      notifyAllOnFailure(RequestFailureType.PARSE, t, null, "Error parsing the JSON stream");
      return;
    }

    success = true;

  } else {

    if (!initiator.cache) {
      BugReportActivity.handleGlobalError(
          initiator.context, "Cache disabled for non-JSON request");
      return;
    }

    // Non-JSON requests are simply streamed straight into the cache file.
    try {
      final byte[] buf = new byte[8 * 1024];

      int bytesRead;
      long totalBytesRead = 0;

      while ((bytesRead = is.read(buf)) > 0) {
        totalBytesRead += bytesRead;
        cacheOs.write(buf, 0, bytesRead);
        notifyAllOnProgress(totalBytesRead, contentLength);
      }

      cacheOs.flush();
      cacheOs.close();
      success = true;

    } catch (Throwable t) {
      t.printStackTrace();
      notifyAllOnFailure(
          RequestFailureType.CONNECTION, t, null, "The connection was interrupted");
    }
  }
}
// TODO merge with downloadGet
private void downloadPost(final HttpClient httpClient) {

  final HttpPost httpPost = new HttpPost(initiator.url);

  try {
    httpPost.setEntity(new UrlEncodedFormEntity(initiator.postFields, HTTP.UTF_8));
  } catch (UnsupportedEncodingException e) {
    BugReportActivity.handleGlobalError(initiator.context, e);
    return;
  }

  final HttpContext localContext = new BasicHttpContext();
  localContext.setAttribute(ClientContext.COOKIE_STORE, initiator.getCookies());

  final HttpResponse response;
  final StatusLine status;

  try {
    response = httpClient.execute(httpPost, localContext);
    status = response.getStatusLine();
  } catch (Throwable t) {
    t.printStackTrace();
    notifyAllOnFailure(RequestFailureType.CONNECTION, t, null, "Unable to open a connection");
    return;
  }

  if (status.getStatusCode() != 200) {
    notifyAllOnFailure(
        RequestFailureType.REQUEST,
        null,
        status,
        String.format("HTTP error %d (%s)", status.getStatusCode(), status.getReasonPhrase()));
    return;
  }

  final HttpEntity entity = response.getEntity();

  if (entity == null) {
    notifyAllOnFailure(
        RequestFailureType.CONNECTION, null, status, "Did not receive a valid HTTP response");
    return;
  }

  final InputStream is;

  try {
    is = entity.getContent();
  } catch (Throwable t) {
    t.printStackTrace();
    notifyAllOnFailure(
        RequestFailureType.CONNECTION, t, status, "Could not open an input stream");
    return;
  }

  if (initiator.isJson) {

    final BufferedInputStream bis = new BufferedInputStream(is, 8 * 1024);

    final JsonValue value;

    try {
      value = new JsonValue(bis);
      value.buildInNewThread();
    } catch (Throwable t) {
      t.printStackTrace();
      notifyAllOnFailure(RequestFailureType.PARSE, t, null, "Error parsing the JSON stream");
      return;
    }

    synchronized (this) {
      this.value = value;
      notifyAllOnJsonParseStarted(value, RRTime.utcCurrentTimeMillis(), session);
    }

    try {
      value.join();
    } catch (Throwable t) {
      t.printStackTrace();
      notifyAllOnFailure(RequestFailureType.PARSE, t, null, "Error parsing the JSON stream");
      return;
    }

    success = true;

  } else {
    // POST responses are never written to the cache, so only JSON makes sense here.
    throw new RuntimeException("POST requests must be for JSON values");
  }
}
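// Sketch for the merge TODO above (a hypothetical helper, not yet part of this
// class; it assumes the same fields and callbacks used by downloadGet and
// downloadPost): the parse/notify/join sequence is identical in both methods
// and could be extracted so each caller only builds its own input stream.
private void parseJsonStream(final InputStream bis) {

  final JsonValue value;

  try {
    value = new JsonValue(bis);
    value.buildInNewThread();
  } catch (Throwable t) {
    t.printStackTrace();
    notifyAllOnFailure(RequestFailureType.PARSE, t, null, "Error parsing the JSON stream");
    return;
  }

  // Publish the value before the parse finishes, as both callers do today.
  synchronized (this) {
    this.value = value;
    notifyAllOnJsonParseStarted(value, RRTime.utcCurrentTimeMillis(), session);
  }

  try {
    value.join();
  } catch (Throwable t) {
    t.printStackTrace();
    notifyAllOnFailure(RequestFailureType.PARSE, t, null, "Error parsing the JSON stream");
    return;
  }

  success = true;
}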