@Override
 public DataSet<LocatedRow> write() throws StandardException {
   // Persist the RDD through the configured Hadoop OutputFormat — this call
   // performs the actual write to storage.
   rdd.saveAsNewAPIHadoopDataset(conf);
   // Once the data has landed, fire any AFTER-statement triggers attached to
   // the owning operation (skipped when no operation is associated).
   if (operationContext.getOperation() != null) {
     operationContext.getOperation().fireAfterStatementTriggers();
   }
   // Report the write back to the caller as a single-row DataSet whose only
   // column is the number of records written.
   ValueRow countRow = new ValueRow(1);
   countRow.setColumn(1, new SQLLongint(operationContext.getRecordsWritten()));
   LocatedRow resultRow = new LocatedRow(countRow);
   return new SparkDataSet<>(
       SpliceSpark.getContext().parallelize(Collections.singletonList(resultRow), 1));
 }
 @Override
 public TxnView getTxn() {
   if (txn == null) return operationContext.getTxn();
   else return txn;
 }