@Override public final void writeMatrixToHDFS( MatrixBlock src, String fname, long rlen, long clen, int brlen, int bclen, long nnz) throws IOException, DMLRuntimeException { // validity check matrix dimensions if (src.getNumRows() != rlen || src.getNumColumns() != clen) { throw new IOException( "Matrix dimensions mismatch with metadata: " + src.getNumRows() + "x" + src.getNumColumns() + " vs " + rlen + "x" + clen + "."); } // prepare file access JobConf job = new JobConf(ConfigurationManager.getCachedJobConf()); FileSystem fs = FileSystem.get(job); Path path = new Path(fname); // if the file already exists on HDFS, remove it. MapReduceTool.deleteFileIfExistOnHDFS(fname); // core write (sequential/parallel) writeCSVMatrixToHDFS(path, job, fs, src, _props); IOUtilFunctions.deleteCrcFilesFromLocalFileSystem(fs, path); }
/**
 * Writes an empty matrix of the given logical dimensions to HDFS in CSV format.
 *
 * @param fname target HDFS file name
 * @param rlen  number of rows of the empty matrix
 * @param clen  number of columns of the empty matrix
 * @param brlen block row size (unused by the CSV format)
 * @param bclen block column size (unused by the CSV format)
 * @throws IOException         if the underlying write fails
 * @throws DMLRuntimeException propagated from the core CSV writer
 */
@Override
public final void writeEmptyMatrixToHDFS(String fname, long rlen, long clen, int brlen, int bclen)
	throws IOException, DMLRuntimeException
{
	// Set up the file-system access objects.
	JobConf conf = new JobConf(ConfigurationManager.getCachedJobConf());
	Path outPath = new Path(fname);
	FileSystem hdfs = FileSystem.get(conf);

	// An empty sparse block with rlen rows and a single column is handed to the
	// CSV writer. NOTE(review): the hard-coded column count of 1 (instead of clen)
	// looks intentional for the empty-output case — confirm against the
	// empty-block handling inside writeCSVMatrixToHDFS.
	MatrixBlock emptyBlock = new MatrixBlock((int) rlen, 1, true);
	writeCSVMatrixToHDFS(outPath, conf, hdfs, emptyBlock, _props);

	// Clean up checksum side files left behind on local file systems.
	IOUtilFunctions.deleteCrcFilesFromLocalFileSystem(hdfs, outPath);
}