@Override
public void addTask(Task task) {
  Queue queue = null;
  logger.info("Adding Task[" + task.toString() + "]");
  // fall back to the default queue when no queue name is supplied
  if (task.getQueueName() == null || task.getQueueName().trim().length() == 0) {
    queue = QueueFactory.getDefaultQueue();
  } else {
    queue = QueueFactory.getQueue(task.getQueueName());
  }
  logger.info("Queue to be used : " + queue.getQueueName());
  if (task.getParameterName() != null && task.getParameterValue() != null) {
    logger.info("Adding Parameters to process");
    queue.add(
        TaskOptions.Builder.withUrl(task.getEndpoint())
            .param(task.getParameterName(), task.getParameterValue())
            .taskName(task.getName()));
  } else {
    logger.info("excluding Parameters to process");
    queue.add(TaskOptions.Builder.withUrl(task.getEndpoint()).taskName(task.getName()));
  }
}
public void deleteAudioClip(String audioClipId) throws BadRequestException {
  AudioClip audioClip = getAudioClipById(audioClipId);
  datastore.delete(KeyFactory.stringToKey(audioClipId));

  // task queue to delete the associated audio and image blobs
  Queue queue = QueueFactory.getDefaultQueue();
  queue.add(
      TaskOptions.Builder.withUrl("/rest/users/" + audioClip.getOwnerId() + "/audioClips/audio")
          .method(TaskOptions.Method.DELETE)
          .param("blobkey", audioClip.getAudioId()));
  queue.add(
      TaskOptions.Builder.withUrl("/rest/users/" + audioClip.getOwnerId() + "/audioClips/image")
          .method(TaskOptions.Method.DELETE)
          .param("blobkey", audioClip.getImageId()));
}
/**
 * Handles the callback from the device indicating that a new data file is available. This method
 * calls processFile to retrieve the file and persist the data to the data store, then adds
 * access points for each water point in the survey responses.
 *
 * @param req
 */
@SuppressWarnings("rawtypes")
private void ingestFile(TaskRequest req) {
  if (req.getFileName() != null) {
    log.info(" Task->processFile");
    ArrayList<SurveyInstance> surveyInstances =
        processFile(req.getFileName(), req.getPhoneNumber(), req.getChecksum(), req.getOffset());
    Map<Long, Survey> surveyMap = new HashMap<Long, Survey>();
    SurveyDAO surveyDao = new SurveyDAO();
    Queue summQueue = QueueFactory.getQueue("dataSummarization");
    Queue defaultQueue = QueueFactory.getDefaultQueue();
    for (SurveyInstance instance : surveyInstances) {
      Survey s = surveyMap.get(instance.getSurveyId());
      if (s == null) {
        s = surveyDao.getById(instance.getSurveyId());
        surveyMap.put(instance.getSurveyId(), s);
      }
      if (s != null && s.getRequireApproval() != null && s.getRequireApproval()) {
        // if the survey requires approval, don't run any of the processors
        instance.setApprovedFlag("False");
        continue;
      } else {
        ProcessingAction pa = dispatch(instance.getKey().getId() + "");
        TaskOptions options = TaskOptions.Builder.withUrl(pa.getDispatchURL());
        Iterator it = pa.getParams().keySet().iterator();
        while (it.hasNext()) {
          options.param("key", (String) it.next());
        }
        log.info(
            "Received Task Queue calls for surveyInstanceKey: " + instance.getKey().getId() + "");
        aph.processSurveyInstance(instance.getKey().getId() + "");
        summQueue.add(
            TaskOptions.Builder.withUrl("/app_worker/datasummarization")
                .param("objectKey", instance.getKey().getId() + "")
                .param("type", "SurveyInstance"));
        // process the "new" domain structure
        defaultQueue.add(
            TaskOptions.Builder.withUrl("/app_worker/surveyalservlet")
                .param(
                    SurveyalRestRequest.ACTION_PARAM, SurveyalRestRequest.INGEST_INSTANCE_ACTION)
                .param(
                    SurveyalRestRequest.SURVEY_INSTANCE_PARAM, instance.getKey().getId() + ""));
      }
    }
  }
}
public void enqueue() {
  // these tasks run on the background thread...
  ModulesService modulesApi = ModulesServiceFactory.getModulesService();
  String hostname = modulesApi.getVersionHostname(ServletConsts.BACKEND_GAE_SERVICE, null);
  // route the task to the backend service by overriding the Host header
  task.header(ServletConsts.HOST, hostname);
  Queue queue = QueueFactory.getDefaultQueue();
  queue.add(task);
}
// Executed by user menu click
public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
  // Build a task using the TaskOptions Builder pattern (see above)
  Queue queue = QueueFactory.getDefaultQueue();
  queue.add(TaskOptions.Builder.withUrl("/taskq_demo").method(TaskOptions.Method.POST));

  resp.getWriter().println("Task has been added to the default queue...");
  resp.getWriter().println("Refresh this page to add another count task");
}
public static void createCatalogBackendTask(String queueName, Date date) {
  Queue queue = QueueFactory.getQueue(queueName);
  TaskOptions taskOptions =
      TaskOptions.Builder.withUrl("/tasks/catalog").method(TaskOptions.Method.POST);
  if (date != null) {
    taskOptions.param(
        "date", DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT).format(date));
  }
  taskOptions.param("backend", Boolean.toString(true));
  queue.add(taskOptions);
}
protected void doPost(HttpServletRequest request, HttpServletResponse response)
    throws ServletException, IOException {
  String key = request.getParameter("keyname");
  String keyvalue = request.getParameter("value");

  // Add the task to the default queue.
  Queue queue = QueueFactory.getDefaultQueue();
  queue.add(
      TaskOptions.Builder.withUrl("/worker").param("keyname", key).param("value", keyvalue));

  response.sendRedirect("/done.html");
}
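For context, a minimal sketch of a servlet that could be mapped to /worker and consume the task enqueued above. Only the "keyname" and "value" parameter names come from the snippet; the class name, logger, and body are assumptions made for illustration, not part of the original source.

import java.io.IOException;
import java.util.logging.Logger;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Hypothetical worker servlet mapped to /worker. The task queue delivers the
// enqueued parameters as regular form fields of a POST request.
public class WorkerServlet extends HttpServlet {
  private static final Logger log = Logger.getLogger(WorkerServlet.class.getName());

  @Override
  protected void doPost(HttpServletRequest request, HttpServletResponse response)
      throws IOException {
    String key = request.getParameter("keyname");
    String value = request.getParameter("value");
    log.info("Processing task for keyname=" + key + ", value=" + value);
    // A 2xx response marks the task as done; any other status causes App Engine
    // to retry the task according to the queue's retry configuration.
    response.setStatus(HttpServletResponse.SC_OK);
  }
}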
/**
 * Puts the summarization request into the queue.
 *
 * @param request
 */
private void invokeSummarizer(DataSummarizationRequest request) {
  Queue queue = QueueFactory.getQueue(queueName);
  queue.add(
      TaskOptions.Builder.withUrl(summarizerPath)
          .param(DataSummarizationRequest.ACTION_PARAM, request.getAction())
          .param(DataSummarizationRequest.OBJECT_KEY, request.getObjectKey())
          .param(DataSummarizationRequest.OBJECT_TYPE, request.getType())
          .param(DataSummarizationRequest.OFFSET_KEY, request.getOffset().toString())
          .param(
              DataSummarizationRequest.CURSOR_PARAM,
              request.getCursor() != null ? request.getCursor() : ""));
}
public static void createInitBackendTask(String queueName, Cursor cursor) {
  Queue queue = QueueFactory.getQueue(queueName);
  TaskOptions taskOptions =
      TaskOptions.Builder.withUrl("/tasks/init").method(TaskOptions.Method.POST);
  if (cursor != null && cursor.getElement() != null && cursor.getOffset() != null) {
    taskOptions
        .param("element", cursor.getElement())
        .param("offset", cursor.getOffset().toString());
  }
  if (cursor != null && cursor.backend) {
    taskOptions.param("backend", Boolean.toString(cursor.backend));
  }
  log.info("Adding task " + taskOptions.toString());
  queue.add(taskOptions);
}
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse res)
    throws ServletException, IOException {
  Queue queue = QueueFactory.getDefaultQueue();
  // fetch the bot definitions and run each one's processing via the task queue
  for (BotDefinition bot : BotDefinition.getBots()) {
    if (!bot.hasAccessToken()) // skip bots whose OAuth authorization has failed
      continue;
    // queue.add(url("/botHandler").param("botId", "" + bot.getId()));
    queue.add(Builder.withUrl("/botHandler").param("botId", "" + bot.getId()));
    logger.info("submitted a task for botId " + bot.getId());
  }
}
@GET
@Path("/monitoring/check-for-updates")
public void execute() throws Exception {
  TaskOptions taskOptions;
  Queue queue = QueueFactory.getQueue("monitoring");
  for (Monitoring monitoring : MonitoringManager.findAll()) {
    if (!monitoring.isCompleted()) {
      taskOptions = Builder.withUrl("/admin/monitoring/check-for-updates");
      taskOptions = taskOptions.param("clientId", monitoring.getClientId());
      taskOptions = taskOptions.param("trackId", monitoring.getTrackId());
      taskOptions.method(POST);
      queue.add(taskOptions);
    }
  }
}
public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
  Queue queue = QueueFactory.getQueue("pesky-slow");
  queue.add(withUrl("/cron/basketballscrape"));
}
public void enqueue(String queueName) {
  // the named queue runs on the foreground thread...
  Queue queue = QueueFactory.getQueue(queueName);
  queue.add(task);
}
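The two enqueue() methods above submit a pre-built `task` field (a TaskOptions instance). Purely as a sketch of how such a field might be assembled, with the class name, constructor, and parameter names being hypothetical assumptions rather than code from the original source:

import com.google.appengine.api.taskqueue.Queue;
import com.google.appengine.api.taskqueue.QueueFactory;
import com.google.appengine.api.taskqueue.TaskOptions;

// Hypothetical wrapper holding a single TaskOptions instance, mirroring the
// enqueue()/enqueue(String) pattern shown in the snippets above.
public class BackgroundTask {
  private final TaskOptions task;

  public BackgroundTask(String url, String paramName, String paramValue) {
    // POST to a URL inside this application; the parameter is sent as a form field
    this.task = TaskOptions.Builder.withUrl(url)
        .method(TaskOptions.Method.POST)
        .param(paramName, paramValue);
  }

  // Submit to the default push queue
  public void enqueue() {
    Queue queue = QueueFactory.getDefaultQueue();
    queue.add(task);
  }

  // Submit to a named queue (which must be declared in the app's queue configuration)
  public void enqueue(String queueName) {
    Queue queue = QueueFactory.getQueue(queueName);
    queue.add(task);
  }
}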
private ArrayList<SurveyInstance> processFile(
    String fileName, String phoneNumber, String checksum, Integer offset) {
  ArrayList<SurveyInstance> surveyInstances = new ArrayList<SurveyInstance>();
  try {
    DeviceFilesDao dfDao = new DeviceFilesDao();
    URL url = new URL(DEVICE_FILE_PATH + fileName);
    BufferedInputStream bis = new BufferedInputStream(url.openStream());
    ZipInputStream zis = new ZipInputStream(bis);
    List<DeviceFiles> dfList = null;
    DeviceFiles deviceFile = null;
    dfList = dfDao.listByUri(url.toURI().toString());
    if (dfList != null && dfList.size() > 0) deviceFile = dfList.get(0);
    if (deviceFile == null) {
      deviceFile = new DeviceFiles();
    }
    deviceFile.setProcessDate(getNowDateTimeFormatted());
    deviceFile.setProcessedStatus(StatusCode.IN_PROGRESS);
    deviceFile.setURI(url.toURI().toString());
    if (phoneNumber == null || phoneNumber.equals("null")) deviceFile.setPhoneNumber(null);
    else deviceFile.setPhoneNumber(phoneNumber);
    if (checksum == null || checksum.equals("null")) deviceFile.setChecksum(null);
    else deviceFile.setChecksum(checksum);
    deviceFile.setUploadDateTime(new Date());
    Date collectionDate = new Date();
    ArrayList<String> unparsedLines = null;
    try {
      unparsedLines = extractDataFromZip(zis);
    } catch (Exception iex) {
      // Error unzipping the response file
      deviceFile.setProcessedStatus(StatusCode.ERROR_INFLATING_ZIP);
      String message =
          "Error inflating device zip: " + deviceFile.getURI() + " : " + iex.getMessage();
      log.log(Level.SEVERE, message);
      deviceFile.addProcessingMessage(message);
      MailUtil.sendMail(
          FROM_ADDRESS,
          "FLOW",
          recepientList,
          "Device File Processing Error: " + fileName,
          message);
    }
    if (unparsedLines != null && unparsedLines.size() > 0) {
      if (REGION_FLAG.equals(unparsedLines.get(0))) {
        unparsedLines.remove(0);
        GeoRegionHelper grh = new GeoRegionHelper();
        grh.processRegionsSurvey(unparsedLines);
      } else {
        int lineNum = offset;
        String curId = null;
        while (lineNum < unparsedLines.size()) {
          String[] parts = unparsedLines.get(lineNum).split("\t");
          if (parts.length < 5) {
            parts = unparsedLines.get(lineNum).split(",");
          }
          if (parts.length >= 2) {
            if (curId == null) {
              curId = parts[1];
            } else {
              // if this isn't the first time through and we are seeing a new id,
              // break since we'll process that in another call
              if (!curId.equals(parts[1])) {
                break;
              }
            }
          }
          lineNum++;
        }
        Long userID = 1L;
        dfDao.save(deviceFile);
        SurveyInstance inst =
            siDao.save(collectionDate, deviceFile, userID, unparsedLines.subList(offset, lineNum));
        if (inst != null) {
          // fire a survey event
          SurveyEventHelper.fireEvent(
              SurveyEventHelper.SUBMISSION_EVENT, inst.getSurveyId(), inst.getKey().getId());
          surveyInstances.add(inst);
          // TODO: HACK because we were saving so many duplicate device files;
          // this way they all get the same status
          if (dfList != null) {
            for (DeviceFiles dfitem : dfList) {
              dfitem.setProcessedStatus(inst.getDeviceFile().getProcessedStatus());
            }
          }
        }
        if (lineNum < unparsedLines.size()) {
          if (inst != null) {
            StatusCode processingStatus = inst.getDeviceFile().getProcessedStatus();
            if (processingStatus.equals(StatusCode.PROCESSED_WITH_ERRORS)) {
              String message =
                  "Error in file during first processing step. Continuing to next part";
              deviceFile.addProcessingMessage(message);
              deviceFile.setProcessedStatus(StatusCode.IN_PROGRESS);
            } else {
              deviceFile.addProcessingMessage(
                  "Processed " + lineNum + " lines spawning queue call");
              deviceFile.setProcessedStatus(StatusCode.IN_PROGRESS);
            }
          }
          // if we haven't processed everything yet, invoke a new service
          Queue queue = QueueFactory.getDefaultQueue();
          queue.add(
              TaskOptions.Builder.withUrl("/app_worker/task")
                  .param("action", "processFile")
                  .param("fileName", fileName)
                  .param("offset", lineNum + ""));
        } else {
          StatusCode status = StatusCode.PROCESSED_NO_ERRORS;
          if (deviceFile.getProcessedStatus() != null) {
            status = deviceFile.getProcessedStatus();
          }
          deviceFile.setProcessedStatus(status);
          if (dfList != null) {
            for (DeviceFiles dfitem : dfList) {
              dfitem.setProcessedStatus(status);
            }
          }
        }
      }
    } else {
      deviceFile.setProcessedStatus(StatusCode.PROCESSED_WITH_ERRORS);
      String message = "Error empty file: " + deviceFile.getURI();
      log.log(Level.SEVERE, message);
      deviceFile.addProcessingMessage(message);
      MailUtil.sendMail(
          FROM_ADDRESS,
          "FLOW",
          recepientList,
          "Device File Processing Error: " + fileName,
          DEVICE_FILE_PATH + fileName + "\n" + message);
    }
    dfDao.save(dfList);
    zis.close();
  } catch (Exception e) {
    log.log(Level.SEVERE, "Could not process data file", e);
    MailUtil.sendMail(
        FROM_ADDRESS,
        "FLOW",
        recepientList,
        "Device File Processing Error: " + fileName,
        DEVICE_FILE_PATH + fileName + "\n" + (e.getMessage() != null ? e.getMessage() : ""));
  }
  return surveyInstances;
}
public static ArrayList<String> extractDataFromZip(ZipInputStream zis)
    throws IOException, SignedDataException {
  ArrayList<String> lines = new ArrayList<String>();
  String line = null;
  String surveyDataOnly = null;
  String dataSig = null;
  ZipEntry entry;
  while ((entry = zis.getNextEntry()) != null) {
    log.info("Unzipping: " + entry.getName());
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    byte[] buffer = new byte[2048];
    int size;
    while ((size = zis.read(buffer, 0, buffer.length)) != -1) {
      out.write(buffer, 0, size);
    }
    line = out.toString("UTF-8");
    if (entry.getName().endsWith("txt")) {
      if (entry.getName().equals("regions.txt")) {
        lines.add("regionFlag=true");
      } else {
        surveyDataOnly = line;
      }
      String[] linesSplit = line.split("\n");
      for (String s : linesSplit) {
        if (s.contains("\u0000")) {
          s = s.replaceAll("\u0000", "");
        }
        lines.add(s);
      }
    } else if (entry.getName().endsWith(".sig")) {
      dataSig = line.trim();
    } else {
      S3Driver s3 = new S3Driver();
      String[] imageParts = entry.getName().split("/");
      // comment out while testing locally
      try {
        // GAEImageAdapter gaeIA = new GAEImageAdapter();
        // byte[] resizedImage = gaeIA.resizeImage(out.toByteArray(), 500, 500);
        // s3.uploadFile("dru-test", imageParts[1], resizedImage);
        GAEImageAdapter gaeImg = new GAEImageAdapter();
        byte[] newImage = gaeImg.resizeImage(out.toByteArray(), 500, 500);
        s3.uploadFile("dru-test", imageParts[1], newImage);
        // add queue call to resize
        Queue queue = QueueFactory.getDefaultQueue();
        queue.add(TaskOptions.Builder.withUrl("imageprocessor").param("imageURL", imageParts[1]));
        log.info("submitting image resize for imageURL: " + imageParts[1]);
      } catch (Exception ex) {
        ex.printStackTrace();
      }
      out.close();
    }
    zis.closeEntry();
  }
  // check the signature if we have it
  if (surveyDataOnly != null && dataSig != null) {
    try {
      MessageDigest sha1Digest = MessageDigest.getInstance("SHA1");
      byte[] digest = sha1Digest.digest(surveyDataOnly.getBytes("UTF-8"));
      SecretKeySpec signingKey =
          new SecretKeySpec(
              PropertyUtil.getProperty(SIGNING_KEY).getBytes("UTF-8"), SIGNING_ALGORITHM);
      Mac mac = Mac.getInstance(SIGNING_ALGORITHM);
      mac.init(signingKey);
      byte[] hmac = mac.doFinal(digest);
      String encodedHmac = com.google.gdata.util.common.util.Base64.encode(hmac);
      if (!encodedHmac.trim().equals(dataSig.trim())) {
        String allowUnsigned = PropertyUtil.getProperty(ALLOW_UNSIGNED);
        if (allowUnsigned != null && allowUnsigned.trim().equalsIgnoreCase("false")) {
          throw new SignedDataException(
              "Computed signature does not match the one submitted with the data");
        } else {
          log.warning("Signatures don't match. Processing anyway since allow unsigned is true");
        }
      }
    } catch (GeneralSecurityException e) {
      throw new SignedDataException("Could not calculate signature", e);
    }
  } else if (surveyDataOnly != null) {
    // if there is no signature, check the configuration to see if we are allowed to proceed
    String allowUnsigned = PropertyUtil.getProperty(ALLOW_UNSIGNED);
    if (allowUnsigned != null && allowUnsigned.trim().equalsIgnoreCase("false")) {
      throw new SignedDataException("Datafile does not have a signature");
    }
  }
  return lines;
}