private static void writeRecordToFile(List<Record> records, String basePath) throws IOException {
    Record record = records.get(0);
    String filePath = basePath + File.separator + record.getId();
    byte[] data = GenericUtils.serializeObject(records);
    FileOutputStream fileOut = null;
    DataOutputStream dataOut = null;
    try {
        fileOut = new FileOutputStream(filePath);
        dataOut = new DataOutputStream(fileOut);
        dataOut.write(data);
    } finally {
        if (dataOut != null) {
            dataOut.close();
        }
        if (fileOut != null) {
            fileOut.close();
        }
    }
}
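/*
 * A minimal sketch of the read-side counterpart used during restore, assuming
 * GenericUtils provides a deserializeObject(byte[]) that mirrors the
 * serializeObject(...) call above. This is illustrative only, not the
 * project's actual implementation.
 */
@SuppressWarnings("unchecked")
private static List<Record> readRecordFromFile(File file) throws IOException {
    DataInputStream dataIn = null;
    try {
        dataIn = new DataInputStream(new FileInputStream(file));
        byte[] data = new byte[(int) file.length()];
        dataIn.readFully(data);
        // Deserialize the full list of records written by writeRecordToFile.
        return (List<Record>) GenericUtils.deserializeObject(data);
    } finally {
        if (dataIn != null) {
            dataIn.close();
        }
    }
}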
/**
 * Converts a DB record to Spark Row(s). Creates one or more rows from a single record.
 *
 * @param record Record to be converted to row(s)
 */
private Iterator<Row> recordToRows(Record record) {
    List<Row> tempRows = new ArrayList<Row>();
    Map<String, Object> recordVals = record.getValues();
    try {
        if (recordVals.get(AnalyticsConstants.DATA_COLUMN) != null) {
            String eventsJson = recordVals.get(AnalyticsConstants.DATA_COLUMN).toString();
            if ((Boolean) recordVals.get(AnalyticsConstants.META_FIELD_COMPRESSED)) {
                eventsJson = decompress(eventsJson);
            }
            JSONObject eventsAggregated = new JSONObject(eventsJson);
            JSONArray eventsArray = eventsAggregated.getJSONArray(AnalyticsConstants.JSON_FIELD_EVENTS);
            Map<Integer, Map<String, String>> payloadsMap = null;
            if (eventsAggregated.has(AnalyticsConstants.JSON_FIELD_PAYLOADS)) {
                JSONArray payloadsArray = eventsAggregated.getJSONArray(AnalyticsConstants.JSON_FIELD_PAYLOADS);
                payloadsMap = getPayloadsAsMap(payloadsArray);
            }
            // Iterate over the array of events, creating one row per event.
            for (int i = 0; i < eventsArray.length(); i++) {
                // Create a row with extended fields.
                tempRows.add(RowFactory.create(getFieldValues(eventsAggregated,
                        eventsArray.getJSONObject(i), payloadsMap, i, record.getTimestamp())));
            }
        } else {
            tempRows.add(RowFactory.create(Collections.emptyList().toArray()));
        }
    } catch (JSONException e) {
        throw new RuntimeException("Error occurred while splitting the record to rows: "
                + e.getMessage(), e);
    }
    return tempRows.iterator();
}
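/*
 * A hedged sketch of the decompress(String) helper referenced above, assuming
 * the DATA_COLUMN payload is Base64-encoded, GZIP-compressed UTF-8 JSON. The
 * real encoding may differ, so treat this as illustrative only.
 */
private static String decompress(String compressedJson) {
    byte[] compressed = java.util.Base64.getDecoder().decode(compressedJson);
    try (java.util.zip.GZIPInputStream gzipIn =
                 new java.util.zip.GZIPInputStream(new ByteArrayInputStream(compressed));
         ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        byte[] buffer = new byte[4096];
        int read;
        // Inflate the GZIP stream chunk by chunk into the output buffer.
        while ((read = gzipIn.read(buffer)) != -1) {
            out.write(buffer, 0, read);
        }
        return out.toString("UTF-8");
    } catch (IOException e) {
        throw new RuntimeException("Error occurred while decompressing event data: "
                + e.getMessage(), e);
    }
}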
private static void restoreTable(AnalyticsDataService service, int tenantId, String table,
        File baseDir, long timeFrom, long timeTo) {
    try {
        System.out.print("Restoring table '" + table + "'..");
        service.createTable(tenantId, table);
        File myDir = new File(baseDir.getAbsolutePath() + File.separator + table);
        if (!myDir.isDirectory()) {
            System.out.println(myDir.getAbsolutePath()
                    + " is not a directory to contain table data, skipping.");
            return;
        }
        AnalyticsSchema schema = readTableSchema(baseDir.getAbsolutePath() + File.separator + table);
        service.setTableSchema(tenantId, table, schema);
        File[] files = myDir.listFiles();
        if (files == null) {
            System.out.println("Unable to list files in " + myDir.getAbsolutePath() + ", skipping.");
            return;
        }
        int count = 0;
        for (File file : files) {
            if (file.getName().equalsIgnoreCase(TABLE_SCHEMA_FILE_NAME)) {
                continue;
            }
            if (count % 5000 == 0) {
                System.out.print(".");
            }
            if (file.isDirectory()) {
                System.out.println(file.getAbsolutePath()
                        + " is a directory, which cannot contain record data, skipping.");
                continue;
            }
            try {
                List<Record> records = readRecordFromFile(file);
                Iterator<Record> recordItr = records.iterator();
                while (recordItr.hasNext()) {
                    Record record = recordItr.next();
                    if (!table.equals(record.getTableName())) {
                        System.out.println("Invalid record, invalid table name in record compared to "
                                + "current directory: " + record.getTableName());
                    }
                    /* check timestamp range; remove through the iterator to avoid
                       a ConcurrentModificationException while filtering */
                    if (!(record.getTimestamp() >= timeFrom && record.getTimestamp() < timeTo)) {
                        recordItr.remove();
                    }
                }
                service.put(records);
                if (forceIndexing) {
                    service.waitForIndexing(INDEX_PROCESS_WAIT_TIME);
                }
            } catch (IOException e) {
                System.out.println("Error in reading record data from file: "
                        + file.getAbsoluteFile() + ", skipping.");
            }
            count++;
        }
        System.out.println();
    } catch (Exception e) {
        e.printStackTrace();
        System.out.println("Error in restoring table: " + table + " - " + e.getMessage());
    }
}
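/*
 * A hypothetical driver showing how restoreTable(...) might be invoked for
 * every table directory under a backup base directory. The tool's actual
 * entry point is not part of this excerpt; restoreAllTables is an assumed
 * name used purely for illustration.
 */
private static void restoreAllTables(AnalyticsDataService service, int tenantId,
        File baseDir, long timeFrom, long timeTo) {
    File[] tableDirs = baseDir.listFiles(File::isDirectory);
    if (tableDirs == null) {
        return;
    }
    for (File tableDir : tableDirs) {
        // Each subdirectory of the backup base directory holds one table's data.
        restoreTable(service, tenantId, tableDir.getName(), baseDir, timeFrom, timeTo);
    }
}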