/**
 * Updates the mark of a tweet that is already stored in the CSV base but was saved with a
 * different mark: the whole file is read, column 4 of the given row is overwritten, and the
 * file is rewritten.
 *
 * @param mark the new mark (rating) of the tweet
 * @param row zero-based line index of the tweet inside the CSV base
 * @throws IOException if the CSV file cannot be read or rewritten
 */
public static void updateCSV(int mark, int row) throws IOException {
  List<String[]> csvBody;
  // try-with-resources guarantees the reader is closed even if readAll() throws
  // (the previous version leaked it on any exception).
  try (CSVReader reader = new CSVReader(new FileReader(AppliSettings.filename), ';')) {
    csvBody = reader.readAll();
  }
  // Column 4 holds the mark; overwrite it in place.
  csvBody.get(row)[4] = Integer.toString(mark);
  // Rewrite the whole file; closing the writer also flushes it.
  try (CSVWriter writer = new CSVWriter(new FileWriter(AppliSettings.filename), ';')) {
    writer.writeAll(csvBody);
  }
}
/** Data cleansing method */ public void cleanseData(AmazonS3 client) throws Exception { AwsDataLoader loader = new AwsDataLoader(); CSVReader reader = null; String prefix = loader.getS3Prefix(source); client.setEndpoint(S3_ENDPOINT); S3Object object = client.getObject(new GetObjectRequest(BUCKET_NM, prefix)); reader = new CSVReader( new BufferedReader(new InputStreamReader(object.getObjectContent())), CSVParser.DEFAULT_SEPARATOR, CSVParser.DEFAULT_QUOTE_CHARACTER, CSVParser.DEFAULT_ESCAPE_CHARACTER, HEADERS_LINE); ColumnPositionMappingStrategy<ProductLanguage> strat = new ColumnPositionMappingStrategy<ProductLanguage>(); strat.setType(ProductLanguage.class); String[] columns = new String[] {"refId", "code", "name", "locale", "state", "format", "displayNameLanguage"}; strat.setColumnMapping(columns); CsvToBean<ProductLanguage> csv = new CsvToBean<ProductLanguage>(); list = csv.parse(strat, reader); System.out.println("ProductLanguageCleanser input size: " + list.size()); this.updateDataset(list); BeanToCsv<ProductLanguage> csvWriter = new BeanToCsv<ProductLanguage>(); ByteArrayOutputStream os = new ByteArrayOutputStream(); CSVWriter writer = new CSVWriter(new OutputStreamWriter(os), ',', '"'); // strat.setColumnMapping(columns); log.info("List size: " + list.size()); csvWriter.write(strat, writer, list); writer.flush(); String dataset = os.toString(); String outPrefix = PREFIX + OUTPUT_KEY + source + ".csv"; client.setEndpoint(S3_ENDPOINT); ObjectMetadata omd = new ObjectMetadata(); try { byte[] content = dataset.getBytes(StandardCharsets.UTF_8); ByteArrayInputStream input = new ByteArrayInputStream(content); BufferedReader buffReader = new BufferedReader(new InputStreamReader(input)); buffReader.readLine(); InputStream inputObj = new ReaderInputStream(buffReader); // omd.setContentLength(IOUtils.toByteArray(input).length); client.putObject(BUCKET_NM, outPrefix, inputObj, omd); input.close(); } catch (IOException e) { log.error("Axon data write to s3 failed: " + 
e.getMessage()); } }
/**
 * Writes the given payslips to a CSV file: a fixed header row first, then one row per
 * payslip produced by splitting its {@code toString()} on commas (trailing empty fields kept).
 *
 * @param filename path of the CSV file to create
 * @param payslips payslips to serialize, one per row
 * @throws IllegalStateException wrapping any {@link IOException} raised while writing
 */
public void write(final String filename, final List<Payslip> payslips) {
  final String[] headers = {
    "Name", "Start date", "End date", "Gross income", "Income tax", "Net income", "Super", "Error"
  };
  try (CSVWriter writer = new CSVWriter(new FileWriter(filename))) {
    writer.writeNext(headers);
    for (final Payslip payslip : payslips) {
      // limit -1 keeps trailing empty columns (e.g. an empty Error field)
      writer.writeNext(payslip.toString().split(",", -1));
    }
    writer.flush();
  } catch (final IOException e) {
    throw new IllegalStateException("Error occurred when writing to file " + filename, e);
  }
}
/**
 * Streams a UTF-8 (BOM-prefixed) CSV export of sponsor scans for the given event.
 *
 * <p>The header is Username/Timestamp/Full name/Email followed by the event's additional
 * ticket fields. Rows are produced by taking every user visible to the principal, keeping
 * only those with the SPONSOR role, loading each sponsor's scan data, and joining each
 * scanned ticket with its additional field values (missing values become "").
 *
 * @param eventName event short name from the URL path
 * @param response servlet response the CSV is written to (Content-Disposition: attachment)
 * @param principal current user; scopes both event access and the sponsor list
 * @throws IOException if writing to the response stream fails
 */
@RequestMapping("/events/{eventName}/sponsor-scan/export.csv") public void downloadSponsorScanExport( @PathVariable("eventName") String eventName, HttpServletResponse response, Principal principal) throws IOException { Event event = loadEvent(eventName, principal); List<TicketFieldConfiguration> fields = ticketFieldRepository.findAdditionalFieldsForEvent(event.getId()); response.setContentType("text/csv;charset=UTF-8"); response.setHeader( "Content-Disposition", "attachment; filename=" + eventName + "-sponsor-scan.csv"); try (ServletOutputStream out = response.getOutputStream(); CSVWriter writer = new CSVWriter(new OutputStreamWriter(out))) { for (int marker : BOM_MARKERS) { out.write(marker); } List<String> header = new ArrayList<>(); header.add("Username"); header.add("Timestamp"); header.add("Full name"); header.add("Email"); header.addAll( fields.stream().map(TicketFieldConfiguration::getName).collect(Collectors.toList())); writer.writeNext(header.toArray(new String[header.size()])); userManager .findAllUsers(principal.getName()) .stream() .map(u -> Pair.of(u, userManager.getUserRole(u))) .filter(p -> p.getRight() == Role.SPONSOR) .flatMap( p -> sponsorScanRepository .loadSponsorData( event.getId(), p.getKey().getId(), SponsorScanRepository.DEFAULT_TIMESTAMP) .stream() .map( v -> Pair.of( v, ticketFieldRepository.findAllValuesForTicketId( v.getTicket().getId())))) .map( p -> { DetailedScanData data = p.getLeft(); Map<String, String> descriptions = p.getRight(); return Pair.of( data, fields .stream() .map(x -> descriptions.getOrDefault(x.getName(), "")) .collect(Collectors.toList())); }) .map( p -> { List<String> line = new ArrayList<>(); Ticket ticket = p.getLeft().getTicket(); SponsorScan sponsorScan = p.getLeft().getSponsorScan(); line.add(userManager.findUser(sponsorScan.getUserId()).getUsername()); line.add(sponsorScan.getTimestamp().toString()); line.add(ticket.getFullName()); line.add(ticket.getEmail()); line.addAll(p.getRight()); return line.toArray(new 
String[line.size()]); }) .forEachOrdered(writer::writeNext); writer.flush(); out.flush(); } }
/**
 * Streams a UTF-8 (BOM-prefixed) CSV export of all confirmed tickets for the given event.
 *
 * <p>The caller selects the columns via repeated {@code fields} request parameters; the header
 * row echoes that selection in order. Names in {@code FIXED_FIELDS} map to built-in ticket
 * attributes (ID, creation, category, prices, names, etc. — creation timestamps are rendered
 * in the event's zone, money values via MonetaryUtil); any other requested name is looked up
 * in the ticket's additional field values (missing -> "", with double quotes stripped).
 *
 * @param eventName event short name from the URL path
 * @param request source of the {@code fields} column-selection parameters
 * @param response servlet response the CSV is written to (Content-Disposition: attachment)
 * @param principal current user; scopes event and ticket access
 * @throws IOException if writing to the response stream fails
 */
@RequestMapping("/events/{eventName}/export.csv") public void downloadAllTicketsCSV( @PathVariable("eventName") String eventName, HttpServletRequest request, HttpServletResponse response, Principal principal) throws IOException { List<String> fields = Arrays.asList( Optional.ofNullable(request.getParameterValues("fields")).orElse(new String[] {})); Event event = loadEvent(eventName, principal); Map<Integer, TicketCategory> categoriesMap = eventManager .loadTicketCategories(event) .stream() .collect(Collectors.toMap(TicketCategory::getId, Function.identity())); ZoneId eventZoneId = event.getZoneId(); Predicate<String> contains = FIXED_FIELDS::contains; response.setContentType("text/csv;charset=UTF-8"); response.setHeader("Content-Disposition", "attachment; filename=" + eventName + "-export.csv"); try (ServletOutputStream out = response.getOutputStream(); CSVWriter writer = new CSVWriter(new OutputStreamWriter(out))) { for (int marker : BOM_MARKERS) { // UGLY-MODE_ON: specify that the file is written in UTF-8 with BOM, thanks // to alexr http://stackoverflow.com/a/4192897 out.write(marker); } writer.writeNext(fields.toArray(new String[fields.size()])); eventManager .findAllConfirmedTickets(eventName, principal.getName()) .stream() .map( t -> { List<String> line = new ArrayList<>(); if (fields.contains("ID")) { line.add(t.getUuid()); } if (fields.contains("creation")) { line.add(t.getCreation().withZoneSameInstant(eventZoneId).toString()); } if (fields.contains("category")) { line.add(categoriesMap.get(t.getCategoryId()).getName()); } if (fields.contains("event")) { line.add(eventName); } if (fields.contains("status")) { line.add(t.getStatus().toString()); } if (fields.contains("originalPrice")) { line.add(MonetaryUtil.centsToUnit(t.getSrcPriceCts()).toString()); } if (fields.contains("paidPrice")) { line.add(MonetaryUtil.centsToUnit(t.getFinalPriceCts()).toString()); } if (fields.contains("discount")) { 
line.add(MonetaryUtil.centsToUnit(t.getDiscountCts()).toString()); } if (fields.contains("vat")) { line.add(MonetaryUtil.centsToUnit(t.getVatCts()).toString()); } if (fields.contains("reservationID")) { line.add(t.getTicketsReservationId()); } if (fields.contains("Full Name")) { line.add(t.getFullName()); } if (fields.contains("First Name")) { line.add(t.getFirstName()); } if (fields.contains("Last Name")) { line.add(t.getLastName()); } if (fields.contains("E-Mail")) { line.add(t.getEmail()); } if (fields.contains("locked")) { line.add(String.valueOf(t.getLockedAssignment())); } if (fields.contains("Language")) { line.add(String.valueOf(t.getUserLanguage())); } // obviously not optimized Map<String, String> additionalValues = ticketFieldRepository.findAllValuesForTicketId(t.getId()); fields .stream() .filter(contains.negate()) .forEachOrdered( field -> { line.add(additionalValues.getOrDefault(field, "").replaceAll("\"", "")); }); return line.toArray(new String[line.size()]); }) .forEachOrdered(writer::writeNext); writer.flush(); out.flush(); } }